From fa4588a3700cc542c37df9bf82e2235464ae1bfe Mon Sep 17 00:00:00 2001
From: Stefan Blumentrath
Date: Fri, 1 Oct 2021 10:07:38 +0200
Subject: [PATCH 1/3] Close netcdf dataset after getting its size

---
 thredds_crawler/crawl.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/thredds_crawler/crawl.py b/thredds_crawler/crawl.py
index 61b123b..444705a 100644
--- a/thredds_crawler/crawl.py
+++ b/thredds_crawler/crawl.py
@@ -337,6 +337,7 @@ def size(self):
             for vname in nc.variables:
                 var = nc.variables.get(vname)
                 bites += var.dtype.itemsize * var.size
+            nc.close()
             return bites * 1e-6  # Megabytes
         except ImportError:
             logger.error("The python-netcdf4 library is required for computing the size of this dataset.")

From e1fd6b4afc390ac4e7743a0138f6692193d9f007 Mon Sep 17 00:00:00 2001
From: ninsbl
Date: Sun, 3 Oct 2021 22:45:39 +0200
Subject: [PATCH 2/3] close connections

---
 thredds_crawler/crawl.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/thredds_crawler/crawl.py b/thredds_crawler/crawl.py
index 444705a..7fc6b3d 100644
--- a/thredds_crawler/crawl.py
+++ b/thredds_crawler/crawl.py
@@ -38,7 +38,7 @@ def request_xml(url, auth=None):
     :param str url: URL for the resource to load as an XML
     '''
     try:
-        r = requests.get(url, auth=auth, verify=False)
+        r = requests.get(url, auth=auth, verify=False, headers={"Connection": "close"})
         return r.text.encode('utf-8')
     except BaseException:
         logger.error("Skipping %s (error parsing the XML)" % url)
@@ -257,7 +257,7 @@ def __init__(self, dataset_url, auth=None):
         self.data_size = None

         # Get an etree object
-        r = requests.get(dataset_url, auth=auth, verify=False)
+        r = requests.get(dataset_url, auth=auth, verify=False, headers={"Connection": "close"})
         try:
             tree = etree.XML(r.text.encode('utf-8'))
         except etree.XMLSyntaxError:
@@ -337,7 +337,6 @@ def size(self):
             for vname in nc.variables:
                 var = nc.variables.get(vname)
                 bites += var.dtype.itemsize * var.size
-            nc.close()
             return bites * 1e-6  # Megabytes
         except ImportError:
             logger.error("The python-netcdf4 library is required for computing the size of this dataset.")

From d1abeff3efcf028cffb128c569fd130552e39f16 Mon Sep 17 00:00:00 2001
From: ninsbl
Date: Sun, 3 Oct 2021 22:48:45 +0200
Subject: [PATCH 3/3] still close nc

---
 thredds_crawler/crawl.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/thredds_crawler/crawl.py b/thredds_crawler/crawl.py
index 7fc6b3d..61dd09c 100644
--- a/thredds_crawler/crawl.py
+++ b/thredds_crawler/crawl.py
@@ -337,6 +337,7 @@ def size(self):
             for vname in nc.variables:
                 var = nc.variables.get(vname)
                 bites += var.dtype.itemsize * var.size
+            nc.close()
             return bites * 1e-6  # Megabytes
         except ImportError:
             logger.error("The python-netcdf4 library is required for computing the size of this dataset.")