diff --git a/api/failing.json b/api/failing.json index 05c8b2963f..dc8be76aee 100644 --- a/api/failing.json +++ b/api/failing.json @@ -1 +1 @@ -[{"council_id":"AGB","missing":false,"latest_run":{"status_code":1,"log_text":"[11:00:43] Fetching Scraper for: AGB handlers.py:23\n Begin attempting to scrape: AGB handlers.py:27\n Deleting existing data... base.py:239\n[11:00:44] Getting all files in Councillors... base.py:191\n Getting all files in Councillors/json... base.py:191\n ...found 35 files in Councillors/json base.py:207\n Getting all files in Councillors/raw... base.py:191\n ...found 35 files in Councillors/raw base.py:207\n ...found 71 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 71 files base.py:216\n[11:00:45] ...data deleted. base.py:246\n Scraping from base.py:42\n https://www.argyll-bute.gov.uk/councillor_list \n[11:00:50] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/argyll-and-bute-council/cou \n ncillors-directory/south-kintyre/councillor/john-armour \n[11:00:51] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/argyll-and-bute-council/cou \n ncillors-directory/cowal/councillor/gordon-blair \n[11:00:53] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/mid-argyll/councillor/jan-brown \n[11:00:54] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/helensburgh-and-lomond-south/councillor/math-campbel \n l-sturgess \n[11:00:56] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/argyll-and-bute-council/cou \n ncillors-directory/mid-argyll/councillor/garret-corner \n[11:00:57] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/lomond-north/councillor/maurice-corry \n[11:00:58] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/kintyre-and-islands/councillor/robin-currie \n[11:01:00] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/dunoon/councillor/audrey-e-forrest \n[11:01:01] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/oban-north-and-lorn/councillor/kieron-green \n[11:01:03] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/oban-south-and-isles/councillor/amanda-hampsey \n[11:01:04] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/dunoon/councillor/daniel-hampsey \n[11:01:05] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/helensburgh-central/councillor/graham-archibald-hard \n ie \n[11:01:07] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/councillor_list?page=1 \n[11:01:10] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/helensburgh-central/councillor/fiona-howard \n[11:01:11] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/oban-south-and-isles/councillor/willie-hume \n[11:01:13] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/lomond-north/councillor/mark-irvine \n[11:01:14] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/oban-south-and-isles/councillor/andrew-kain \n[11:01:15] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/south-kintyre/councillor/jennifer-kelly \n[11:01:17] 
'NoneType' object has no attribute 'img' handlers.py:36\n Committing batch 1 consisting of 32 files base.py:274\n[11:01:18] Finished attempting to scrape: AGB base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 52, in run\n councillor = self.get_single_councillor(councillor_html)\n File \"scrapers/AGB-argyll-and-bute/councillors.py\", line 37, in get_single_councillor\n councillor.photo_url = soup.select_one(\".field--name-field-photo\").img[\"src\"]\nAttributeError: 'NoneType' object has no attribute 'img'\n","start":"2023-11-07 11:00:43.202526","end":"2023-11-07 11:01:18.607255","duration":35}},{"council_id":"ELS","missing":false,"latest_run":{"status_code":1,"log_text":"[09:37:30] Fetching Scraper for: ELS handlers.py:23\n Begin attempting to scrape: ELS handlers.py:27\n[09:37:31] Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n Getting all files in Councillors/json... base.py:191\n[09:37:32] ...found 29 files in Councillors/json base.py:207\n Getting all files in Councillors/raw... base.py:191\n ...found 29 files in Councillors/raw base.py:207\n ...found 59 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 59 files base.py:216\n[09:37:33] ...data deleted. base.py:246\n Scraping from base.py:42\n https://www.cne-siar.gov.uk/your-council/wards-and-council \n lors/council-members/ \n[09:37:43] HTTPSConnectionPool(host='www.cne-siar.gov.uk', handlers.py:36\n port=443): Max retries exceeded with url: \n /your-council/wards-and-councillors/council-members/ \n (Caused by \n NewConnectionError(': Failed to establish a new \n connection: [Errno -2] Name or service not known')) \n Finished attempting to scrape: ELS base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connection.py\", line 174, in _new_conn\n conn = connection.create_connection(\n File \"/opt/python/urllib3/util/connection.py\", line 72, in create_connection\n for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):\n File \"/var/lang/lib/python3.8/socket.py\", line 918, in getaddrinfo\n for res in _socket.getaddrinfo(host, port, family, type, proto, flags):\nsocket.gaierror: [Errno -2] Name or service not known\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 404, in _make_request\n self._validate_conn(conn)\n File \"/opt/python/urllib3/connectionpool.py\", line 1058, in _validate_conn\n conn.connect()\n File \"/opt/python/urllib3/connection.py\", line 363, in connect\n self.sock = conn = self._new_conn()\n File \"/opt/python/urllib3/connection.py\", line 186, in _new_conn\n raise NewConnectionError(\nurllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno -2] Name or service not known\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 592, in increment\n raise MaxRetryError(_pool, url, error or 
ResponseError(cause))\nurllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='www.cne-siar.gov.uk', port=443): Max retries exceeded with url: /your-council/wards-and-councillors/council-members/ (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -2] Name or service not known'))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 151, in get_councillors\n container = self.get_list_container()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 144, in get_list_container\n self.base_url_soup = self.get_page(self.base_url)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 131, in get_page\n page = self.get(\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, headers=headers, verify=verify)\n File \"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 703, in send\n r = adapter.send(request, **kwargs)\n File \"/opt/python/requests/adapters.py\", line 519, in send\n raise ConnectionError(e, request=request)\nrequests.exceptions.ConnectionError: HTTPSConnectionPool(host='www.cne-siar.gov.uk', port=443): Max retries exceeded with url: /your-council/wards-and-councillors/council-members/ (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -2] Name or service not known'))\n","start":"2023-11-07 09:37:30.747395","end":"2023-11-07 09:37:43.621872","duration":12}},{"council_id":"LUT","missing":false,"latest_run":{"status_code":1,"log_text":"[10:41:46] Fetching Scraper for: LUT handlers.py:23\n Begin attempting to scrape: LUT handlers.py:27\n Deleting existing data... base.py:239\n[10:41:47] Getting all files in Councillors... base.py:191\n Getting all files in Councillors/json... base.py:191\n ...found 48 files in Councillors/json base.py:207\n Getting all files in Councillors/raw... base.py:191\n[10:41:48] ...found 48 files in Councillors/raw base.py:207\n ...found 97 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 97 files base.py:216\n[10:41:49] ...data deleted. 
base.py:246\n Scraping from base.py:42\n http://democracy.luton.gov.uk/cmis5public/Councillors.aspx \n[10:43:29] 524 Server Error: for url: handlers.py:36\n https://democracy.luton.gov.uk/cmis5public/Councillors \n .aspx \n Finished attempting to scrape: LUT base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 251, in get_councillors\n req = self.get(\n File \"/var/task/lgsf/scrapers/base.py\", line 49, in get\n response.raise_for_status()\n File \"/opt/python/requests/models.py\", line 1021, in raise_for_status\n raise HTTPError(http_error_msg, response=self)\nrequests.exceptions.HTTPError: 524 Server Error: for url: https://democracy.luton.gov.uk/cmis5public/Councillors.aspx\n","start":"2023-11-07 10:41:46.565814","end":"2023-11-07 10:43:29.636083","duration":103}},{"council_id":"MDB","missing":false,"latest_run":{"status_code":1,"log_text":"[10:58:02] Fetching Scraper for: MDB handlers.py:23\n Begin attempting to scrape: MDB handlers.py:27\n[10:58:03] Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n Getting all files in Councillors/json... base.py:191\n ...found 47 files in Councillors/json base.py:207\n Getting all files in Councillors/raw... base.py:191\n[10:58:04] ...found 47 files in Councillors/raw base.py:207\n ...found 95 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 95 files base.py:216\n[10:58:05] ...data deleted. base.py:246\n Scraping from base.py:42\n https://moderngov.middlesbrough.gov.uk/mgWebService.asmx/G \n etCouncillorsByWard \n[11:00:14] HTTPSConnectionPool(host='moderngov.middlesbrough.gov. 
handlers.py:36\n uk', port=443): Max retries exceeded with url: \n /mgWebService.asmx/GetCouncillorsByWard (Caused by \n NewConnectionError(': Failed to establish a new \n connection: [Errno 110] Connection timed out')) \n[11:00:15] Finished attempting to scrape: MDB base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connection.py\", line 174, in _new_conn\n conn = connection.create_connection(\n File \"/opt/python/urllib3/util/connection.py\", line 95, in create_connection\n raise err\n File \"/opt/python/urllib3/util/connection.py\", line 85, in create_connection\n sock.connect(sa)\nTimeoutError: [Errno 110] Connection timed out\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 404, in _make_request\n self._validate_conn(conn)\n File \"/opt/python/urllib3/connectionpool.py\", line 1058, in _validate_conn\n conn.connect()\n File \"/opt/python/urllib3/connection.py\", line 363, in connect\n self.sock = conn = self._new_conn()\n File \"/opt/python/urllib3/connection.py\", line 186, in _new_conn\n raise NewConnectionError(\nurllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 110] Connection timed out\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 592, in increment\n raise MaxRetryError(_pool, url, error or ResponseError(cause))\nurllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='moderngov.middlesbrough.gov.uk', port=443): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 110] Connection timed out'))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 182, in run\n wards = self.get_councillors()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 199, in get_councillors\n req = self.get(self.format_councillor_api_url(), verify=self.verify_requests)\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, headers=headers, verify=verify)\n File \"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 703, in send\n r = adapter.send(request, **kwargs)\n File \"/opt/python/requests/adapters.py\", line 519, in send\n raise ConnectionError(e, request=request)\nrequests.exceptions.ConnectionError: HTTPSConnectionPool(host='moderngov.middlesbrough.gov.uk', port=443): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 110] Connection timed out'))\n","start":"2023-11-07 10:58:02.764188","end":"2023-11-07 
11:00:15.503119","duration":132}},{"council_id":"OAD","missing":false,"latest_run":{"status_code":1,"log_text":"[11:02:50] Fetching Scraper for: OAD handlers.py:23\n Begin attempting to scrape: OAD handlers.py:27\n Deleting existing data... base.py:239\n[11:02:51] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[11:02:52] ...data deleted. base.py:246\n Scraping from base.py:42\n http://moderngov.oadby-wigston.gov.uk/mgWebService.asmx/Ge \n tCouncillorsByWard \n ('Connection aborted.', ConnectionResetError(104, handlers.py:36\n 'Connection reset by peer')) \n Finished attempting to scrape: OAD base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 467, in _make_request\n six.raise_from(e, None)\n File \"\", line 3, in raise_from\n File \"/opt/python/urllib3/connectionpool.py\", line 462, in _make_request\n httplib_response = conn.getresponse()\n File \"/var/lang/lib/python3.8/http/client.py\", line 1348, in getresponse\n response.begin()\n File \"/var/lang/lib/python3.8/http/client.py\", line 316, in begin\n version, status, reason = self._read_status()\n File \"/var/lang/lib/python3.8/http/client.py\", line 277, in _read_status\n line = str(self.fp.readline(_MAXLINE + 1), \"iso-8859-1\")\n File \"/var/lang/lib/python3.8/socket.py\", line 669, in readinto\n return self._sock.recv_into(b)\nConnectionResetError: [Errno 104] Connection reset by peer\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 550, in increment\n raise six.reraise(type(error), error, _stacktrace)\n File \"/opt/python/urllib3/packages/six.py\", line 769, in reraise\n raise value.with_traceback(tb)\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 467, in _make_request\n six.raise_from(e, None)\n File \"\", line 3, in raise_from\n File \"/opt/python/urllib3/connectionpool.py\", line 462, in _make_request\n httplib_response = conn.getresponse()\n File \"/var/lang/lib/python3.8/http/client.py\", line 1348, in getresponse\n response.begin()\n File \"/var/lang/lib/python3.8/http/client.py\", line 316, in begin\n version, status, reason = self._read_status()\n File \"/var/lang/lib/python3.8/http/client.py\", line 277, in _read_status\n line = str(self.fp.readline(_MAXLINE + 1), \"iso-8859-1\")\n File \"/var/lang/lib/python3.8/socket.py\", line 669, in readinto\n return self._sock.recv_into(b)\nurllib3.exceptions.ProtocolError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 182, in run\n wards = self.get_councillors()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 199, in get_councillors\n req = self.get(self.format_councillor_api_url(), 
verify=self.verify_requests)\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, headers=headers, verify=verify)\n File \"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 703, in send\n r = adapter.send(request, **kwargs)\n File \"/opt/python/requests/adapters.py\", line 501, in send\n raise ConnectionError(err, request=request)\nrequests.exceptions.ConnectionError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))\n","start":"2023-11-07 11:02:50.247540","end":"2023-11-07 11:02:52.727063","duration":2}},{"council_id":"ORK","missing":false,"latest_run":{"status_code":1,"log_text":"[11:04:57] Fetching Scraper for: ORK handlers.py:23\n Begin attempting to scrape: ORK handlers.py:27\n Deleting existing data... base.py:239\n[11:04:58] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[11:04:59] ...data deleted. base.py:246\n Scraping from base.py:42\n https://www.orkney.gov.uk/Council/Councillors/councillor-p \n rofiles.htm \n 404 Client Error: Not Found for url: handlers.py:36\n https://www.orkney.gov.uk/Council/Councillors/councill \n or-profiles.htm \n Finished attempting to scrape: ORK base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 151, in get_councillors\n container = self.get_list_container()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 144, in get_list_container\n self.base_url_soup = self.get_page(self.base_url)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 131, in get_page\n page = self.get(\n File \"/var/task/lgsf/scrapers/base.py\", line 49, in get\n response.raise_for_status()\n File \"/opt/python/requests/models.py\", line 1021, in raise_for_status\n raise HTTPError(http_error_msg, response=self)\nrequests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://www.orkney.gov.uk/Council/Councillors/councillor-profiles.htm\n","start":"2023-11-07 11:04:57.465868","end":"2023-11-07 11:04:59.886228","duration":2}},{"council_id":"SFT","missing":false,"latest_run":{"status_code":1,"log_text":"[08:22:31] Fetching Scraper for: SFT handlers.py:23\n[08:22:32] Begin attempting to scrape: SFT handlers.py:27\n Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n[08:22:33] ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[08:22:34] ...data deleted. 
base.py:246\n Scraping from base.py:42\n http://modgov.sefton.gov.uk/mgWebService.asmx/GetCouncillo \n rsByWard \n HTTPSConnectionPool(host='modgov.sefton.gov.uk', handlers.py:36\n port=443): Max retries exceeded with url: \n /mgWebService.asmx/GetCouncillorsByWard (Caused by \n SSLError(SSLCertVerificationError(1, '[SSL: \n CERTIFICATE_VERIFY_FAILED] certificate verify failed: \n unable to get local issuer certificate \n (_ssl.c:1131)'))) \n Finished attempting to scrape: SFT base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 404, in _make_request\n self._validate_conn(conn)\n File \"/opt/python/urllib3/connectionpool.py\", line 1058, in _validate_conn\n conn.connect()\n File \"/opt/python/urllib3/connection.py\", line 419, in connect\n self.sock = ssl_wrap_socket(\n File \"/opt/python/urllib3/util/ssl_.py\", line 449, in ssl_wrap_socket\n ssl_sock = _ssl_wrap_socket_impl(\n File \"/opt/python/urllib3/util/ssl_.py\", line 493, in _ssl_wrap_socket_impl\n return ssl_context.wrap_socket(sock, server_hostname=server_hostname)\n File \"/var/lang/lib/python3.8/ssl.py\", line 500, in wrap_socket\n return self.sslsocket_class._create(\n File \"/var/lang/lib/python3.8/ssl.py\", line 1073, in _create\n self.do_handshake()\n File \"/var/lang/lib/python3.8/ssl.py\", line 1342, in do_handshake\n self._sslobj.do_handshake()\nssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1131)\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 592, in increment\n raise MaxRetryError(_pool, url, error or ResponseError(cause))\nurllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='modgov.sefton.gov.uk', port=443): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1131)')))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 182, in run\n wards = self.get_councillors()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 199, in get_councillors\n req = self.get(self.format_councillor_api_url(), verify=self.verify_requests)\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, headers=headers, verify=verify)\n File \"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 725, in send\n history = [resp for resp in gen]\n File \"/opt/python/requests/sessions.py\", line 725, in \n history = [resp for resp in gen]\n File \"/opt/python/requests/sessions.py\", line 266, in resolve_redirects\n resp = self.send(\n File 
\"/opt/python/requests/sessions.py\", line 703, in send\n r = adapter.send(request, **kwargs)\n File \"/opt/python/requests/adapters.py\", line 517, in send\n raise SSLError(e, request=request)\nrequests.exceptions.SSLError: HTTPSConnectionPool(host='modgov.sefton.gov.uk', port=443): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1131)')))\n","start":"2023-11-07 08:22:31.995437","end":"2023-11-07 08:22:34.566971","duration":2}},{"council_id":"SHE","missing":false,"latest_run":{"status_code":null,"log_text":"[11:28:20] Fetching Scraper for: SHE handlers.py:22\n Begin attempting to scrape: SHE handlers.py:25\n Deleting existing data... base.py:234\n Getting all files in SHE... base.py:186\n[11:28:21] Getting all files in SHE/json... base.py:186\n ...found 30 files in SHE/json base.py:202\n Getting all files in SHE/raw... base.py:186\n ...found 30 files in SHE/raw base.py:202\n ...found 61 files in SHE base.py:202\n Deleting batch no. 1 consisting of 61 files base.py:211\n[11:28:32] An error occurred (ThrottlingException) when calling handlers.py:34\n the CreateCommit operation (reached max retries: 4): \n Rate exceeded \n Finished attempting to scrape: SHE base.py:319\n","errors":"An error occurred (ThrottlingException) when calling the CreateCommit operation (reached max retries: 4): Rate exceeded","start":"2022-04-04 11:28:20.509898","end":"2022-04-04 11:28:32.871624","duration":12}},{"council_id":"SHN","missing":false,"latest_run":{"status_code":1,"log_text":"[09:43:47] Fetching Scraper for: SHN handlers.py:23\n Begin attempting to scrape: SHN handlers.py:27\n Deleting existing data... base.py:239\n[09:43:48] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[09:43:49] ...data deleted. 
base.py:246\n Scraping from base.py:42\n http://moderngov.sthelens.gov.uk/mgWebService.asmx/GetCoun \n cillorsByWard \n HTTPConnectionPool(host='moderngov.sthelens.gov.uk', handlers.py:36\n port=80): Max retries exceeded with url: \n /mgWebService.asmx/GetCouncillorsByWard (Caused by \n NewConnectionError(': Failed to establish a new \n connection: [Errno 113] No route to host')) \n Finished attempting to scrape: SHN base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connection.py\", line 174, in _new_conn\n conn = connection.create_connection(\n File \"/opt/python/urllib3/util/connection.py\", line 95, in create_connection\n raise err\n File \"/opt/python/urllib3/util/connection.py\", line 85, in create_connection\n sock.connect(sa)\nOSError: [Errno 113] No route to host\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 416, in _make_request\n conn.request(method, url, **httplib_request_kw)\n File \"/opt/python/urllib3/connection.py\", line 244, in request\n super(HTTPConnection, self).request(method, url, body=body, headers=headers)\n File \"/var/lang/lib/python3.8/http/client.py\", line 1256, in request\n self._send_request(method, url, body, headers, encode_chunked)\n File \"/var/lang/lib/python3.8/http/client.py\", line 1302, in _send_request\n self.endheaders(body, encode_chunked=encode_chunked)\n File \"/var/lang/lib/python3.8/http/client.py\", line 1251, in endheaders\n self._send_output(message_body, encode_chunked=encode_chunked)\n File \"/var/lang/lib/python3.8/http/client.py\", line 1011, in _send_output\n self.send(msg)\n File \"/var/lang/lib/python3.8/http/client.py\", line 951, in send\n self.connect()\n File \"/opt/python/urllib3/connection.py\", line 205, in connect\n conn = self._new_conn()\n File \"/opt/python/urllib3/connection.py\", line 186, in _new_conn\n raise NewConnectionError(\nurllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 113] No route to host\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 592, in increment\n raise MaxRetryError(_pool, url, error or ResponseError(cause))\nurllib3.exceptions.MaxRetryError: HTTPConnectionPool(host='moderngov.sthelens.gov.uk', port=80): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 113] No route to host'))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 182, in run\n wards = self.get_councillors()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 199, in get_councillors\n req = self.get(self.format_councillor_api_url(), verify=self.verify_requests)\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, headers=headers, verify=verify)\n File 
\"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 703, in send\n r = adapter.send(request, **kwargs)\n File \"/opt/python/requests/adapters.py\", line 519, in send\n raise ConnectionError(e, request=request)\nrequests.exceptions.ConnectionError: HTTPConnectionPool(host='moderngov.sthelens.gov.uk', port=80): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 113] No route to host'))\n","start":"2023-11-07 09:43:47.421667","end":"2023-11-07 09:43:49.900883","duration":2}},{"council_id":"SNO","missing":false,"latest_run":{"status_code":1,"log_text":"[10:41:18] Fetching Scraper for: SNO handlers.py:23\n Begin attempting to scrape: SNO handlers.py:27\n Deleting existing data... base.py:239\n[10:41:19] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[10:41:20] ...data deleted. base.py:246\n Scraping from base.py:42\n https://www.southnorfolkandbroadland.gov.uk/directory/3/so \n uth-norfolk-councillor-directory/category/11 \n 404 Client Error: Not Found for url: handlers.py:36\n https://www.southnorfolkandbroadland.gov.uk/directory/ \n 3/south-norfolk-councillor-directory/category/11 \n Finished attempting to scrape: SNO base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 151, in get_councillors\n container = self.get_list_container()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 144, in get_list_container\n self.base_url_soup = self.get_page(self.base_url)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 131, in get_page\n page = self.get(\n File \"/var/task/lgsf/scrapers/base.py\", line 49, in get\n response.raise_for_status()\n File \"/opt/python/requests/models.py\", line 1021, in raise_for_status\n raise HTTPError(http_error_msg, response=self)\nrequests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://www.southnorfolkandbroadland.gov.uk/directory/3/south-norfolk-councillor-directory/category/11\n","start":"2023-11-07 10:41:18.405626","end":"2023-11-07 10:41:20.703394","duration":2}},{"council_id":"SST","missing":false,"latest_run":{"status_code":1,"log_text":"[10:43:34] Fetching Scraper for: SST handlers.py:23\n Begin attempting to scrape: SST handlers.py:27\n Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n[10:43:35] ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[10:43:36] ...data deleted. 
base.py:246\n Scraping from base.py:42\n https://services.sstaffs.gov.uk/cmis/Councillors.aspx \n 404 Client Error: Not Found for url: handlers.py:36\n https://services.sstaffs.gov.uk/cmis/Councillors.aspx \n Finished attempting to scrape: SST base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 251, in get_councillors\n req = self.get(\n File \"/var/task/lgsf/scrapers/base.py\", line 49, in get\n response.raise_for_status()\n File \"/opt/python/requests/models.py\", line 1021, in raise_for_status\n raise HTTPError(http_error_msg, response=self)\nrequests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://services.sstaffs.gov.uk/cmis/Councillors.aspx\n","start":"2023-11-07 10:43:34.055159","end":"2023-11-07 10:43:36.535796","duration":2}},{"council_id":"STG","missing":false,"latest_run":{"status_code":1,"log_text":"[09:08:43] Fetching Scraper for: STG handlers.py:23\n Begin attempting to scrape: STG handlers.py:27\n Deleting existing data... base.py:239\n[09:08:44] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[09:08:45] ...data deleted. base.py:246\n Scraping from https://www.stirling.gov.uk/councillors base.py:42\n[09:08:48] list index out of range handlers.py:36\n Finished attempting to scrape: STG base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 151, in get_councillors\n container = self.get_list_container()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 148, in get_list_container\n return selected[0]\nIndexError: list index out of range\n","start":"2023-11-07 09:08:43.462403","end":"2023-11-07 09:08:48.718455","duration":5}},{"council_id":"TES","missing":false,"latest_run":{"status_code":1,"log_text":"[10:25:46] Fetching Scraper for: TES handlers.py:23\n Begin attempting to scrape: TES handlers.py:27\n Deleting existing data... base.py:239\n[10:25:47] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[10:25:48] ...data deleted. 
base.py:246\n Scraping from base.py:42\n http://testvalley.cmis.uk.com/testvalleypublic/ElectedRepr \n esentatives/tabid/63/ScreenMode/Alphabetical/Default.aspx# \n MemberSectionA \n 404 Client Error: Not Found for url: handlers.py:36\n http://testvalley.cmis.uk.com/testvalleypublic/Elected \n Representatives/tabid/63/ScreenMode/Alphabetical/Defau \n lt.aspx#MemberSectionA \n Finished attempting to scrape: TES base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 251, in get_councillors\n req = self.get(\n File \"/var/task/lgsf/scrapers/base.py\", line 49, in get\n response.raise_for_status()\n File \"/opt/python/requests/models.py\", line 1021, in raise_for_status\n raise HTTPError(http_error_msg, response=self)\nrequests.exceptions.HTTPError: 404 Client Error: Not Found for url: http://testvalley.cmis.uk.com/testvalleypublic/ElectedRepresentatives/tabid/63/ScreenMode/Alphabetical/Default.aspx#MemberSectionA\n","start":"2023-11-07 10:25:46.286201","end":"2023-11-07 10:25:48.516838","duration":2}},{"council_id":"THE","missing":false,"latest_run":{"status_code":1,"log_text":"[08:58:48] Fetching Scraper for: THE handlers.py:23\n Begin attempting to scrape: THE handlers.py:27\n[08:58:49] Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[08:58:50] ...data deleted. base.py:246\n Scraping from base.py:42\n https://www.threerivers.gov.uk/listing/councillors \n[08:58:52] 'NoneType' object has no attribute 'findNext' handlers.py:36\n Finished attempting to scrape: THE base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 151, in get_councillors\n container = self.get_list_container()\n File \"scrapers/THE-three-rivers/councillors.py\", line 13, in get_list_container\n return soup.find(\"h3\", text=\"District Councillor\").findNext(\"ul\")\nAttributeError: 'NoneType' object has no attribute 'findNext'\n","start":"2023-11-07 08:58:48.586656","end":"2023-11-07 08:58:52.496373","duration":3}},{"council_id":"WRT","missing":false,"latest_run":{"status_code":1,"log_text":"[09:38:35] Fetching Scraper for: WRT handlers.py:23\n Begin attempting to scrape: WRT handlers.py:27\n Deleting existing data... base.py:239\n[09:38:36] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[09:38:37] ...data deleted. 
base.py:246\n Scraping from https://www.warrington.gov.uk/councillors base.py:42\n[09:38:39] More than one element selected handlers.py:36\n[09:38:40] Finished attempting to scrape: WRT base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 151, in get_councillors\n container = self.get_list_container()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 147, in get_list_container\n raise ValueError(\"More than one element selected\")\nValueError: More than one element selected\n","start":"2023-11-07 09:38:35.553102","end":"2023-11-07 09:38:40.169158","duration":4}}] +[{"council_id":"AGB","missing":false,"latest_run":{"status_code":1,"log_text":"[10:04:38] Fetching Scraper for: AGB handlers.py:23\n Begin attempting to scrape: AGB handlers.py:27\n[10:04:39] Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n Getting all files in Councillors/json... base.py:191\n[10:04:40] ...found 16 files in Councillors/json base.py:207\n Getting all files in Councillors/raw... base.py:191\n ...found 16 files in Councillors/raw base.py:207\n ...found 33 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 33 files base.py:216\n[10:04:41] ...data deleted. base.py:246\n Scraping from base.py:42\n https://www.argyll-bute.gov.uk/councillor_list \n[10:04:44] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/argyll-and-bute-council/cou \n ncillors-directory/south-kintyre/councillor/john-armour \n[10:04:45] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/argyll-and-bute-council/cou \n ncillors-directory/cowal/councillor/gordon-blair \n[10:04:47] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/mid-argyll/councillor/jan-brown \n[10:04:48] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/helensburgh-and-lomond-south/councillor/math-campbel \n l-sturgess \n[10:04:49] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/argyll-and-bute-council/cou \n ncillors-directory/mid-argyll/councillor/garret-corner \n[10:04:50] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/lomond-north/councillor/maurice-corry \n[10:04:51] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/kintyre-and-islands/councillor/robin-currie \n[10:04:52] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/dunoon/councillor/audrey-e-forrest \n[10:04:54] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/oban-north-and-lorn/councillor/kieron-green \n[10:04:55] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/oban-south-and-isles/councillor/amanda-hampsey \n[10:04:56] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/dunoon/councillor/daniel-hampsey \n[10:04:57] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/helensburgh-central/councillor/graham-archibald-hard \n ie \n[10:04:58] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/councillor_list?page=1 \n[10:05:00] Scraping from 
base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/helensburgh-central/councillor/fiona-howard \n[10:05:02] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/oban-south-and-isles/councillor/willie-hume \n[10:05:03] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/lomond-north/councillor/mark-irvine \n[10:05:05] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/oban-south-and-isles/councillor/andrew-kain \n[10:05:06] Scraping from base.py:42\n https://www.argyll-bute.gov.uk/my-council/councillors-dire \n ctory/south-kintyre/councillor/jennifer-kelly \n[10:05:07] 'NoneType' object has no attribute 'img' handlers.py:36\n Committing batch 1 consisting of 32 files base.py:274\n[10:05:09] Finished attempting to scrape: AGB base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 52, in run\n councillor = self.get_single_councillor(councillor_html)\n File \"scrapers/AGB-argyll-and-bute/councillors.py\", line 37, in get_single_councillor\n councillor.photo_url = soup.select_one(\".field--name-field-photo\").img[\"src\"]\nAttributeError: 'NoneType' object has no attribute 'img'\n","start":"2023-11-09 10:04:38.728115","end":"2023-11-09 10:05:09.038290","duration":30}},{"council_id":"ELS","missing":false,"latest_run":{"status_code":1,"log_text":"[09:10:09] Fetching Scraper for: ELS handlers.py:23\n Begin attempting to scrape: ELS handlers.py:27\n[09:10:10] Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[09:10:11] ...data deleted. 
base.py:246\n Scraping from base.py:42\n https://www.cne-siar.gov.uk/your-council/wards-and-council \n lors/council-members/ \n HTTPSConnectionPool(host='www.cne-siar.gov.uk', handlers.py:36\n port=443): Max retries exceeded with url: \n /your-council/wards-and-councillors/council-members/ \n (Caused by \n NewConnectionError(': Failed to establish a new \n connection: [Errno -2] Name or service not known')) \n Finished attempting to scrape: ELS base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connection.py\", line 174, in _new_conn\n conn = connection.create_connection(\n File \"/opt/python/urllib3/util/connection.py\", line 72, in create_connection\n for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):\n File \"/var/lang/lib/python3.8/socket.py\", line 918, in getaddrinfo\n for res in _socket.getaddrinfo(host, port, family, type, proto, flags):\nsocket.gaierror: [Errno -2] Name or service not known\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 404, in _make_request\n self._validate_conn(conn)\n File \"/opt/python/urllib3/connectionpool.py\", line 1058, in _validate_conn\n conn.connect()\n File \"/opt/python/urllib3/connection.py\", line 363, in connect\n self.sock = conn = self._new_conn()\n File \"/opt/python/urllib3/connection.py\", line 186, in _new_conn\n raise NewConnectionError(\nurllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno -2] Name or service not known\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 592, in increment\n raise MaxRetryError(_pool, url, error or ResponseError(cause))\nurllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='www.cne-siar.gov.uk', port=443): Max retries exceeded with url: /your-council/wards-and-councillors/council-members/ (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -2] Name or service not known'))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 151, in get_councillors\n container = self.get_list_container()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 144, in get_list_container\n self.base_url_soup = self.get_page(self.base_url)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 131, in get_page\n page = self.get(\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, headers=headers, verify=verify)\n File \"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 703, in send\n r = adapter.send(request, 
**kwargs)\n File \"/opt/python/requests/adapters.py\", line 519, in send\n raise ConnectionError(e, request=request)\nrequests.exceptions.ConnectionError: HTTPSConnectionPool(host='www.cne-siar.gov.uk', port=443): Max retries exceeded with url: /your-council/wards-and-councillors/council-members/ (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -2] Name or service not known'))\n","start":"2023-11-09 09:10:09.664079","end":"2023-11-09 09:10:11.836052","duration":2}},{"council_id":"HIN","missing":false,"latest_run":{"status_code":1,"log_text":"[08:56:21] Fetching Scraper for: HIN handlers.py:23\n Begin attempting to scrape: HIN handlers.py:27\n[08:56:22] Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n Getting all files in Councillors/json... base.py:191\n[08:56:23] ...found 34 files in Councillors/json base.py:207\n Getting all files in Councillors/raw... base.py:191\n ...found 34 files in Councillors/raw base.py:207\n ...found 69 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 69 files base.py:216\n[08:56:24] ...data deleted. base.py:246\n Scraping from base.py:42\n http://moderngov.hinckley-bosworth.gov.uk/mgWebService.asm \n x/GetCouncillorsByWard \n[08:56:55] HTTPConnectionPool(host='moderngov.hinckley-bosworth.g handlers.py:36\n ov.uk', port=80): Max retries exceeded with url: \n /mgWebService.asmx/GetCouncillorsByWard (Caused by \n NewConnectionError(': Failed to establish a new \n connection: [Errno 111] Connection refused')) \n[08:56:56] Finished attempting to scrape: HIN base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connection.py\", line 174, in _new_conn\n conn = connection.create_connection(\n File \"/opt/python/urllib3/util/connection.py\", line 95, in create_connection\n raise err\n File \"/opt/python/urllib3/util/connection.py\", line 85, in create_connection\n sock.connect(sa)\nConnectionRefusedError: [Errno 111] Connection refused\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 416, in _make_request\n conn.request(method, url, **httplib_request_kw)\n File \"/opt/python/urllib3/connection.py\", line 244, in request\n super(HTTPConnection, self).request(method, url, body=body, headers=headers)\n File \"/var/lang/lib/python3.8/http/client.py\", line 1256, in request\n self._send_request(method, url, body, headers, encode_chunked)\n File \"/var/lang/lib/python3.8/http/client.py\", line 1302, in _send_request\n self.endheaders(body, encode_chunked=encode_chunked)\n File \"/var/lang/lib/python3.8/http/client.py\", line 1251, in endheaders\n self._send_output(message_body, encode_chunked=encode_chunked)\n File \"/var/lang/lib/python3.8/http/client.py\", line 1011, in _send_output\n self.send(msg)\n File \"/var/lang/lib/python3.8/http/client.py\", line 951, in send\n self.connect()\n File \"/opt/python/urllib3/connection.py\", line 205, in connect\n conn = self._new_conn()\n File \"/opt/python/urllib3/connection.py\", line 186, in _new_conn\n raise NewConnectionError(\nurllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File 
\"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 592, in increment\n raise MaxRetryError(_pool, url, error or ResponseError(cause))\nurllib3.exceptions.MaxRetryError: HTTPConnectionPool(host='moderngov.hinckley-bosworth.gov.uk', port=80): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 182, in run\n wards = self.get_councillors()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 199, in get_councillors\n req = self.get(self.format_councillor_api_url(), verify=self.verify_requests)\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, headers=headers, verify=verify)\n File \"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 703, in send\n r = adapter.send(request, **kwargs)\n File \"/opt/python/requests/adapters.py\", line 519, in send\n raise ConnectionError(e, request=request)\nrequests.exceptions.ConnectionError: HTTPConnectionPool(host='moderngov.hinckley-bosworth.gov.uk', port=80): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))\n","start":"2023-11-09 08:56:21.780698","end":"2023-11-09 08:56:56.246738","duration":34}},{"council_id":"MEL","missing":false,"latest_run":{"status_code":1,"log_text":"[08:25:51] Fetching Scraper for: MEL handlers.py:23\n Begin attempting to scrape: MEL handlers.py:27\n Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n[08:25:52] Getting all files in Councillors/json... base.py:191\n ...found 28 files in Councillors/json base.py:207\n Getting all files in Councillors/raw... base.py:191\n ...found 28 files in Councillors/raw base.py:207\n ...found 57 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 57 files base.py:216\n[08:25:53] ...data deleted. 
base.py:246\n Scraping from base.py:42\n https://democracy.melton.gov.uk/mgWebService.asmx/GetCounc \n illorsByWard \n[08:27:48] ('Connection aborted.', ConnectionResetError(104, handlers.py:36\n 'Connection reset by peer')) \n[08:27:49] Finished attempting to scrape: MEL base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 404, in _make_request\n self._validate_conn(conn)\n File \"/opt/python/urllib3/connectionpool.py\", line 1058, in _validate_conn\n conn.connect()\n File \"/opt/python/urllib3/connection.py\", line 419, in connect\n self.sock = ssl_wrap_socket(\n File \"/opt/python/urllib3/util/ssl_.py\", line 449, in ssl_wrap_socket\n ssl_sock = _ssl_wrap_socket_impl(\n File \"/opt/python/urllib3/util/ssl_.py\", line 493, in _ssl_wrap_socket_impl\n return ssl_context.wrap_socket(sock, server_hostname=server_hostname)\n File \"/var/lang/lib/python3.8/ssl.py\", line 500, in wrap_socket\n return self.sslsocket_class._create(\n File \"/var/lang/lib/python3.8/ssl.py\", line 1073, in _create\n self.do_handshake()\n File \"/var/lang/lib/python3.8/ssl.py\", line 1342, in do_handshake\n self._sslobj.do_handshake()\nConnectionResetError: [Errno 104] Connection reset by peer\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 550, in increment\n raise six.reraise(type(error), error, _stacktrace)\n File \"/opt/python/urllib3/packages/six.py\", line 769, in reraise\n raise value.with_traceback(tb)\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 404, in _make_request\n self._validate_conn(conn)\n File \"/opt/python/urllib3/connectionpool.py\", line 1058, in _validate_conn\n conn.connect()\n File \"/opt/python/urllib3/connection.py\", line 419, in connect\n self.sock = ssl_wrap_socket(\n File \"/opt/python/urllib3/util/ssl_.py\", line 449, in ssl_wrap_socket\n ssl_sock = _ssl_wrap_socket_impl(\n File \"/opt/python/urllib3/util/ssl_.py\", line 493, in _ssl_wrap_socket_impl\n return ssl_context.wrap_socket(sock, server_hostname=server_hostname)\n File \"/var/lang/lib/python3.8/ssl.py\", line 500, in wrap_socket\n return self.sslsocket_class._create(\n File \"/var/lang/lib/python3.8/ssl.py\", line 1073, in _create\n self.do_handshake()\n File \"/var/lang/lib/python3.8/ssl.py\", line 1342, in do_handshake\n self._sslobj.do_handshake()\nurllib3.exceptions.ProtocolError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 182, in run\n wards = self.get_councillors()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 199, in get_councillors\n req = self.get(self.format_councillor_api_url(), verify=self.verify_requests)\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, 
headers=headers, verify=verify)\n File \"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 703, in send\n r = adapter.send(request, **kwargs)\n File \"/opt/python/requests/adapters.py\", line 501, in send\n raise ConnectionError(err, request=request)\nrequests.exceptions.ConnectionError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))\n","start":"2023-11-09 08:25:51.104931","end":"2023-11-09 08:27:49.077352","duration":117}},{"council_id":"NGM","missing":false,"latest_run":{"status_code":1,"log_text":"[10:03:56] Fetching Scraper for: NGM handlers.py:23\n Begin attempting to scrape: NGM handlers.py:27\n[10:03:57] Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n Getting all files in Councillors/json... base.py:191\n ...found 55 files in Councillors/json base.py:207\n Getting all files in Councillors/raw... base.py:191\n[10:03:58] ...found 55 files in Councillors/raw base.py:207\n ...found 111 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 100 files base.py:216\n[10:03:59] Deleting batch no. 2 consisting of 11 files base.py:216\n ...data deleted. base.py:246\n Scraping from base.py:42\n http://committee.nottinghamcity.gov.uk/mgWebService.asmx/G \n etCouncillorsByWard \n[10:04:12] ('Connection aborted.', ConnectionResetError(104, handlers.py:36\n 'Connection reset by peer')) \n Finished attempting to scrape: NGM base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 467, in _make_request\n six.raise_from(e, None)\n File \"\", line 3, in raise_from\n File \"/opt/python/urllib3/connectionpool.py\", line 462, in _make_request\n httplib_response = conn.getresponse()\n File \"/var/lang/lib/python3.8/http/client.py\", line 1348, in getresponse\n response.begin()\n File \"/var/lang/lib/python3.8/http/client.py\", line 316, in begin\n version, status, reason = self._read_status()\n File \"/var/lang/lib/python3.8/http/client.py\", line 277, in _read_status\n line = str(self.fp.readline(_MAXLINE + 1), \"iso-8859-1\")\n File \"/var/lang/lib/python3.8/socket.py\", line 669, in readinto\n return self._sock.recv_into(b)\n File \"/var/lang/lib/python3.8/ssl.py\", line 1274, in recv_into\n return self.read(nbytes, buffer)\n File \"/var/lang/lib/python3.8/ssl.py\", line 1132, in read\n return self._sslobj.read(len, buffer)\nConnectionResetError: [Errno 104] Connection reset by peer\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 550, in increment\n raise six.reraise(type(error), error, _stacktrace)\n File \"/opt/python/urllib3/packages/six.py\", line 769, in reraise\n raise value.with_traceback(tb)\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 467, in _make_request\n six.raise_from(e, None)\n File \"\", line 3, in raise_from\n File 
\"/opt/python/urllib3/connectionpool.py\", line 462, in _make_request\n httplib_response = conn.getresponse()\n File \"/var/lang/lib/python3.8/http/client.py\", line 1348, in getresponse\n response.begin()\n File \"/var/lang/lib/python3.8/http/client.py\", line 316, in begin\n version, status, reason = self._read_status()\n File \"/var/lang/lib/python3.8/http/client.py\", line 277, in _read_status\n line = str(self.fp.readline(_MAXLINE + 1), \"iso-8859-1\")\n File \"/var/lang/lib/python3.8/socket.py\", line 669, in readinto\n return self._sock.recv_into(b)\n File \"/var/lang/lib/python3.8/ssl.py\", line 1274, in recv_into\n return self.read(nbytes, buffer)\n File \"/var/lang/lib/python3.8/ssl.py\", line 1132, in read\n return self._sslobj.read(len, buffer)\nurllib3.exceptions.ProtocolError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 182, in run\n wards = self.get_councillors()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 199, in get_councillors\n req = self.get(self.format_councillor_api_url(), verify=self.verify_requests)\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, headers=headers, verify=verify)\n File \"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 725, in send\n history = [resp for resp in gen]\n File \"/opt/python/requests/sessions.py\", line 725, in \n history = [resp for resp in gen]\n File \"/opt/python/requests/sessions.py\", line 266, in resolve_redirects\n resp = self.send(\n File \"/opt/python/requests/sessions.py\", line 703, in send\n r = adapter.send(request, **kwargs)\n File \"/opt/python/requests/adapters.py\", line 501, in send\n raise ConnectionError(err, request=request)\nrequests.exceptions.ConnectionError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))\n","start":"2023-11-09 10:03:56.687068","end":"2023-11-09 10:04:13.000608","duration":16}},{"council_id":"OAD","missing":false,"latest_run":{"status_code":1,"log_text":"[10:30:18] Fetching Scraper for: OAD handlers.py:23\n Begin attempting to scrape: OAD handlers.py:27\n[10:30:19] Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n[10:30:20] ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n ...data deleted. 
base.py:246\n Scraping from base.py:42\n http://moderngov.oadby-wigston.gov.uk/mgWebService.asmx/Ge \n tCouncillorsByWard \n[10:30:21] ('Connection aborted.', ConnectionResetError(104, handlers.py:36\n 'Connection reset by peer')) \n Finished attempting to scrape: OAD base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 467, in _make_request\n six.raise_from(e, None)\n File \"\", line 3, in raise_from\n File \"/opt/python/urllib3/connectionpool.py\", line 462, in _make_request\n httplib_response = conn.getresponse()\n File \"/var/lang/lib/python3.8/http/client.py\", line 1348, in getresponse\n response.begin()\n File \"/var/lang/lib/python3.8/http/client.py\", line 316, in begin\n version, status, reason = self._read_status()\n File \"/var/lang/lib/python3.8/http/client.py\", line 277, in _read_status\n line = str(self.fp.readline(_MAXLINE + 1), \"iso-8859-1\")\n File \"/var/lang/lib/python3.8/socket.py\", line 669, in readinto\n return self._sock.recv_into(b)\nConnectionResetError: [Errno 104] Connection reset by peer\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 550, in increment\n raise six.reraise(type(error), error, _stacktrace)\n File \"/opt/python/urllib3/packages/six.py\", line 769, in reraise\n raise value.with_traceback(tb)\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 467, in _make_request\n six.raise_from(e, None)\n File \"\", line 3, in raise_from\n File \"/opt/python/urllib3/connectionpool.py\", line 462, in _make_request\n httplib_response = conn.getresponse()\n File \"/var/lang/lib/python3.8/http/client.py\", line 1348, in getresponse\n response.begin()\n File \"/var/lang/lib/python3.8/http/client.py\", line 316, in begin\n version, status, reason = self._read_status()\n File \"/var/lang/lib/python3.8/http/client.py\", line 277, in _read_status\n line = str(self.fp.readline(_MAXLINE + 1), \"iso-8859-1\")\n File \"/var/lang/lib/python3.8/socket.py\", line 669, in readinto\n return self._sock.recv_into(b)\nurllib3.exceptions.ProtocolError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 182, in run\n wards = self.get_councillors()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 199, in get_councillors\n req = self.get(self.format_councillor_api_url(), verify=self.verify_requests)\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, headers=headers, verify=verify)\n File \"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 703, in 
send\n r = adapter.send(request, **kwargs)\n File \"/opt/python/requests/adapters.py\", line 501, in send\n raise ConnectionError(err, request=request)\nrequests.exceptions.ConnectionError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))\n","start":"2023-11-09 10:30:18.960722","end":"2023-11-09 10:30:21.785348","duration":2}},{"council_id":"ORK","missing":false,"latest_run":{"status_code":1,"log_text":"[09:24:00] Fetching Scraper for: ORK handlers.py:23\n Begin attempting to scrape: ORK handlers.py:27\n Deleting existing data... base.py:239\n[09:24:01] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[09:24:02] ...data deleted. base.py:246\n Scraping from base.py:42\n https://www.orkney.gov.uk/Council/Councillors/councillor-p \n rofiles.htm \n 404 Client Error: Not Found for url: handlers.py:36\n https://www.orkney.gov.uk/Council/Councillors/councill \n or-profiles.htm \n Finished attempting to scrape: ORK base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 151, in get_councillors\n container = self.get_list_container()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 144, in get_list_container\n self.base_url_soup = self.get_page(self.base_url)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 131, in get_page\n page = self.get(\n File \"/var/task/lgsf/scrapers/base.py\", line 49, in get\n response.raise_for_status()\n File \"/opt/python/requests/models.py\", line 1021, in raise_for_status\n raise HTTPError(http_error_msg, response=self)\nrequests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://www.orkney.gov.uk/Council/Councillors/councillor-profiles.htm\n","start":"2023-11-09 09:24:00.611523","end":"2023-11-09 09:24:02.932748","duration":2}},{"council_id":"OXF","missing":false,"latest_run":{"status_code":1,"log_text":"[09:55:32] Fetching Scraper for: OXF handlers.py:23\n Begin attempting to scrape: OXF handlers.py:27\n Deleting existing data... base.py:239\n[09:55:33] Getting all files in Councillors... base.py:191\n Getting all files in Councillors/json... base.py:191\n ...found 63 files in Councillors/json base.py:207\n Getting all files in Councillors/raw... base.py:191\n ...found 63 files in Councillors/raw base.py:207\n ...found 127 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 100 files base.py:216\n[09:55:34] Deleting batch no. 2 consisting of 27 files base.py:216\n[09:55:35] ...data deleted. 
base.py:246\n Scraping from base.py:42\n https://mycouncil.oxfordshire.gov.uk/mgWebService.asmx/Get \n CouncillorsByWard \n[09:55:45] HTTPSConnectionPool(host='mycouncil.oxfordshire.gov.uk handlers.py:36\n ', port=443): Max retries exceeded with url: \n /mgWebService.asmx/GetCouncillorsByWard (Caused by \n NewConnectionError(': Failed to establish a new \n connection: [Errno -2] Name or service not known')) \n Finished attempting to scrape: OXF base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connection.py\", line 174, in _new_conn\n conn = connection.create_connection(\n File \"/opt/python/urllib3/util/connection.py\", line 72, in create_connection\n for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):\n File \"/var/lang/lib/python3.8/socket.py\", line 918, in getaddrinfo\n for res in _socket.getaddrinfo(host, port, family, type, proto, flags):\nsocket.gaierror: [Errno -2] Name or service not known\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 404, in _make_request\n self._validate_conn(conn)\n File \"/opt/python/urllib3/connectionpool.py\", line 1058, in _validate_conn\n conn.connect()\n File \"/opt/python/urllib3/connection.py\", line 363, in connect\n self.sock = conn = self._new_conn()\n File \"/opt/python/urllib3/connection.py\", line 186, in _new_conn\n raise NewConnectionError(\nurllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno -2] Name or service not known\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 592, in increment\n raise MaxRetryError(_pool, url, error or ResponseError(cause))\nurllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='mycouncil.oxfordshire.gov.uk', port=443): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -2] Name or service not known'))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 182, in run\n wards = self.get_councillors()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 199, in get_councillors\n req = self.get(self.format_councillor_api_url(), verify=self.verify_requests)\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, headers=headers, verify=verify)\n File \"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 703, in send\n r = adapter.send(request, **kwargs)\n File \"/opt/python/requests/adapters.py\", line 519, in send\n raise ConnectionError(e, request=request)\nrequests.exceptions.ConnectionError: 
HTTPSConnectionPool(host='mycouncil.oxfordshire.gov.uk', port=443): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -2] Name or service not known'))\n","start":"2023-11-09 09:55:32.273130","end":"2023-11-09 09:55:45.845427","duration":13}},{"council_id":"SFT","missing":false,"latest_run":{"status_code":1,"log_text":"[10:07:37] Fetching Scraper for: SFT handlers.py:23\n Begin attempting to scrape: SFT handlers.py:27\n Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n[10:07:38] ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n ...data deleted. base.py:246\n Scraping from base.py:42\n http://modgov.sefton.gov.uk/mgWebService.asmx/GetCouncillo \n rsByWard \n[10:07:39] HTTPSConnectionPool(host='modgov.sefton.gov.uk', handlers.py:36\n port=443): Max retries exceeded with url: \n /mgWebService.asmx/GetCouncillorsByWard (Caused by \n SSLError(SSLCertVerificationError(1, '[SSL: \n CERTIFICATE_VERIFY_FAILED] certificate verify failed: \n unable to get local issuer certificate \n (_ssl.c:1131)'))) \n Finished attempting to scrape: SFT base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 404, in _make_request\n self._validate_conn(conn)\n File \"/opt/python/urllib3/connectionpool.py\", line 1058, in _validate_conn\n conn.connect()\n File \"/opt/python/urllib3/connection.py\", line 419, in connect\n self.sock = ssl_wrap_socket(\n File \"/opt/python/urllib3/util/ssl_.py\", line 449, in ssl_wrap_socket\n ssl_sock = _ssl_wrap_socket_impl(\n File \"/opt/python/urllib3/util/ssl_.py\", line 493, in _ssl_wrap_socket_impl\n return ssl_context.wrap_socket(sock, server_hostname=server_hostname)\n File \"/var/lang/lib/python3.8/ssl.py\", line 500, in wrap_socket\n return self.sslsocket_class._create(\n File \"/var/lang/lib/python3.8/ssl.py\", line 1073, in _create\n self.do_handshake()\n File \"/var/lang/lib/python3.8/ssl.py\", line 1342, in do_handshake\n self._sslobj.do_handshake()\nssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1131)\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 592, in increment\n raise MaxRetryError(_pool, url, error or ResponseError(cause))\nurllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='modgov.sefton.gov.uk', port=443): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1131)')))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 182, in run\n wards = self.get_councillors()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 199, in 
get_councillors\n req = self.get(self.format_councillor_api_url(), verify=self.verify_requests)\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, headers=headers, verify=verify)\n File \"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 725, in send\n history = [resp for resp in gen]\n File \"/opt/python/requests/sessions.py\", line 725, in \n history = [resp for resp in gen]\n File \"/opt/python/requests/sessions.py\", line 266, in resolve_redirects\n resp = self.send(\n File \"/opt/python/requests/sessions.py\", line 703, in send\n r = adapter.send(request, **kwargs)\n File \"/opt/python/requests/adapters.py\", line 517, in send\n raise SSLError(e, request=request)\nrequests.exceptions.SSLError: HTTPSConnectionPool(host='modgov.sefton.gov.uk', port=443): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1131)')))\n","start":"2023-11-09 10:07:37.016583","end":"2023-11-09 10:07:39.421585","duration":2}},{"council_id":"SHE","missing":false,"latest_run":{"status_code":null,"log_text":"[11:28:20] Fetching Scraper for: SHE handlers.py:22\n Begin attempting to scrape: SHE handlers.py:25\n Deleting existing data... base.py:234\n Getting all files in SHE... base.py:186\n[11:28:21] Getting all files in SHE/json... base.py:186\n ...found 30 files in SHE/json base.py:202\n Getting all files in SHE/raw... base.py:186\n ...found 30 files in SHE/raw base.py:202\n ...found 61 files in SHE base.py:202\n Deleting batch no. 1 consisting of 61 files base.py:211\n[11:28:32] An error occurred (ThrottlingException) when calling handlers.py:34\n the CreateCommit operation (reached max retries: 4): \n Rate exceeded \n Finished attempting to scrape: SHE base.py:319\n","errors":"An error occurred (ThrottlingException) when calling the CreateCommit operation (reached max retries: 4): Rate exceeded","start":"2022-04-04 11:28:20.509898","end":"2022-04-04 11:28:32.871624","duration":12}},{"council_id":"SHN","missing":false,"latest_run":{"status_code":1,"log_text":"[08:54:38] Fetching Scraper for: SHN handlers.py:23\n Begin attempting to scrape: SHN handlers.py:27\n Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n[08:54:39] ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n ...data deleted. 
base.py:246\n Scraping from base.py:42\n http://moderngov.sthelens.gov.uk/mgWebService.asmx/GetCoun \n cillorsByWard \n[08:54:43] HTTPConnectionPool(host='moderngov.sthelens.gov.uk', handlers.py:36\n port=80): Max retries exceeded with url: \n /mgWebService.asmx/GetCouncillorsByWard (Caused by \n NewConnectionError(': Failed to establish a new \n connection: [Errno 113] No route to host')) \n Finished attempting to scrape: SHN base.py:324\n","errors":"Traceback (most recent call last):\n File \"/opt/python/urllib3/connection.py\", line 174, in _new_conn\n conn = connection.create_connection(\n File \"/opt/python/urllib3/util/connection.py\", line 95, in create_connection\n raise err\n File \"/opt/python/urllib3/util/connection.py\", line 85, in create_connection\n sock.connect(sa)\nOSError: [Errno 113] No route to host\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/urllib3/connectionpool.py\", line 715, in urlopen\n httplib_response = self._make_request(\n File \"/opt/python/urllib3/connectionpool.py\", line 416, in _make_request\n conn.request(method, url, **httplib_request_kw)\n File \"/opt/python/urllib3/connection.py\", line 244, in request\n super(HTTPConnection, self).request(method, url, body=body, headers=headers)\n File \"/var/lang/lib/python3.8/http/client.py\", line 1256, in request\n self._send_request(method, url, body, headers, encode_chunked)\n File \"/var/lang/lib/python3.8/http/client.py\", line 1302, in _send_request\n self.endheaders(body, encode_chunked=encode_chunked)\n File \"/var/lang/lib/python3.8/http/client.py\", line 1251, in endheaders\n self._send_output(message_body, encode_chunked=encode_chunked)\n File \"/var/lang/lib/python3.8/http/client.py\", line 1011, in _send_output\n self.send(msg)\n File \"/var/lang/lib/python3.8/http/client.py\", line 951, in send\n self.connect()\n File \"/opt/python/urllib3/connection.py\", line 205, in connect\n conn = self._new_conn()\n File \"/opt/python/urllib3/connection.py\", line 186, in _new_conn\n raise NewConnectionError(\nurllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 113] No route to host\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/opt/python/requests/adapters.py\", line 486, in send\n resp = conn.urlopen(\n File \"/opt/python/urllib3/connectionpool.py\", line 799, in urlopen\n retries = retries.increment(\n File \"/opt/python/urllib3/util/retry.py\", line 592, in increment\n raise MaxRetryError(_pool, url, error or ResponseError(cause))\nurllib3.exceptions.MaxRetryError: HTTPConnectionPool(host='moderngov.sthelens.gov.uk', port=80): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 113] No route to host'))\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 182, in run\n wards = self.get_councillors()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 199, in get_councillors\n req = self.get(self.format_councillor_api_url(), verify=self.verify_requests)\n File \"/var/task/lgsf/scrapers/base.py\", line 48, in get\n response = self.requests_session.get(url, headers=headers, verify=verify)\n File 
\"/opt/python/requests/sessions.py\", line 602, in get\n return self.request(\"GET\", url, **kwargs)\n File \"/opt/python/requests/sessions.py\", line 589, in request\n resp = self.send(prep, **send_kwargs)\n File \"/opt/python/requests/sessions.py\", line 703, in send\n r = adapter.send(request, **kwargs)\n File \"/opt/python/requests/adapters.py\", line 519, in send\n raise ConnectionError(e, request=request)\nrequests.exceptions.ConnectionError: HTTPConnectionPool(host='moderngov.sthelens.gov.uk', port=80): Max retries exceeded with url: /mgWebService.asmx/GetCouncillorsByWard (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 113] No route to host'))\n","start":"2023-11-09 08:54:38.190384","end":"2023-11-09 08:54:43.385892","duration":5}},{"council_id":"SNO","missing":false,"latest_run":{"status_code":1,"log_text":"[10:12:58] Fetching Scraper for: SNO handlers.py:23\n Begin attempting to scrape: SNO handlers.py:27\n Deleting existing data... base.py:239\n[10:12:59] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[10:13:00] ...data deleted. base.py:246\n Scraping from base.py:42\n https://www.southnorfolkandbroadland.gov.uk/directory/3/so \n uth-norfolk-councillor-directory/category/11 \n 404 Client Error: Not Found for url: handlers.py:36\n https://www.southnorfolkandbroadland.gov.uk/directory/ \n 3/south-norfolk-councillor-directory/category/11 \n Finished attempting to scrape: SNO base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 151, in get_councillors\n container = self.get_list_container()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 144, in get_list_container\n self.base_url_soup = self.get_page(self.base_url)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 131, in get_page\n page = self.get(\n File \"/var/task/lgsf/scrapers/base.py\", line 49, in get\n response.raise_for_status()\n File \"/opt/python/requests/models.py\", line 1021, in raise_for_status\n raise HTTPError(http_error_msg, response=self)\nrequests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://www.southnorfolkandbroadland.gov.uk/directory/3/south-norfolk-councillor-directory/category/11\n","start":"2023-11-09 10:12:58.576916","end":"2023-11-09 10:13:00.893343","duration":2}},{"council_id":"SST","missing":false,"latest_run":{"status_code":1,"log_text":"[08:35:18] Fetching Scraper for: SST handlers.py:23\n Begin attempting to scrape: SST handlers.py:27\n Deleting existing data... base.py:239\n[08:35:19] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[08:35:20] ...data deleted. 
base.py:246\n Scraping from base.py:42\n https://services.sstaffs.gov.uk/cmis/Councillors.aspx \n 404 Client Error: Not Found for url: handlers.py:36\n https://services.sstaffs.gov.uk/cmis/Councillors.aspx \n Finished attempting to scrape: SST base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 251, in get_councillors\n req = self.get(\n File \"/var/task/lgsf/scrapers/base.py\", line 49, in get\n response.raise_for_status()\n File \"/opt/python/requests/models.py\", line 1021, in raise_for_status\n raise HTTPError(http_error_msg, response=self)\nrequests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://services.sstaffs.gov.uk/cmis/Councillors.aspx\n","start":"2023-11-09 08:35:18.152574","end":"2023-11-09 08:35:20.586325","duration":2}},{"council_id":"STG","missing":false,"latest_run":{"status_code":1,"log_text":"[08:25:43] Fetching Scraper for: STG handlers.py:23\n Begin attempting to scrape: STG handlers.py:27\n Deleting existing data... base.py:239\n[08:25:44] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[08:25:45] ...data deleted. base.py:246\n Scraping from https://www.stirling.gov.uk/councillors base.py:42\n[08:25:47] list index out of range handlers.py:36\n Finished attempting to scrape: STG base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 151, in get_councillors\n container = self.get_list_container()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 148, in get_list_container\n return selected[0]\nIndexError: list index out of range\n","start":"2023-11-09 08:25:43.371127","end":"2023-11-09 08:25:47.580276","duration":4}},{"council_id":"TES","missing":false,"latest_run":{"status_code":1,"log_text":"[10:08:09] Fetching Scraper for: TES handlers.py:23\n Begin attempting to scrape: TES handlers.py:27\n Deleting existing data... base.py:239\n[10:08:10] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[10:08:11] ...data deleted. 
base.py:246\n Scraping from base.py:42\n http://testvalley.cmis.uk.com/testvalleypublic/ElectedRepr \n esentatives/tabid/63/ScreenMode/Alphabetical/Default.aspx# \n MemberSectionA \n 404 Client Error: Not Found for url: handlers.py:36\n http://testvalley.cmis.uk.com/testvalleypublic/Elected \n Representatives/tabid/63/ScreenMode/Alphabetical/Defau \n lt.aspx#MemberSectionA \n Finished attempting to scrape: TES base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 251, in get_councillors\n req = self.get(\n File \"/var/task/lgsf/scrapers/base.py\", line 49, in get\n response.raise_for_status()\n File \"/opt/python/requests/models.py\", line 1021, in raise_for_status\n raise HTTPError(http_error_msg, response=self)\nrequests.exceptions.HTTPError: 404 Client Error: Not Found for url: http://testvalley.cmis.uk.com/testvalleypublic/ElectedRepresentatives/tabid/63/ScreenMode/Alphabetical/Default.aspx#MemberSectionA\n","start":"2023-11-09 10:08:09.463040","end":"2023-11-09 10:08:11.501422","duration":2}},{"council_id":"THE","missing":false,"latest_run":{"status_code":1,"log_text":"[08:57:00] Fetching Scraper for: THE handlers.py:23\n Begin attempting to scrape: THE handlers.py:27\n Deleting existing data... base.py:239\n[08:57:01] Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[08:57:02] ...data deleted. base.py:246\n Scraping from base.py:42\n https://www.threerivers.gov.uk/listing/councillors \n[08:57:04] 'NoneType' object has no attribute 'findNext' handlers.py:36\n[08:57:05] Finished attempting to scrape: THE base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 151, in get_councillors\n container = self.get_list_container()\n File \"scrapers/THE-three-rivers/councillors.py\", line 13, in get_list_container\n return soup.find(\"h3\", text=\"District Councillor\").findNext(\"ul\")\nAttributeError: 'NoneType' object has no attribute 'findNext'\n","start":"2023-11-09 08:57:00.624258","end":"2023-11-09 08:57:05.178967","duration":4}},{"council_id":"WRT","missing":false,"latest_run":{"status_code":1,"log_text":"[09:24:14] Fetching Scraper for: WRT handlers.py:23\n Begin attempting to scrape: WRT handlers.py:27\n[09:24:15] Deleting existing data... base.py:239\n Getting all files in Councillors... base.py:191\n ...found 1 files in Councillors base.py:207\n Deleting batch no. 1 consisting of 1 files base.py:216\n[09:24:16] ...data deleted. 
base.py:246\n Scraping from https://www.warrington.gov.uk/councillors base.py:42\n[09:24:18] More than one element selected handlers.py:36\n[09:24:19] Finished attempting to scrape: WRT base.py:324\n","errors":"Traceback (most recent call last):\n File \"/var/task/lgsf/aws_lambda/handlers.py\", line 32, in scraper_worker_handler\n scraper.run(run_log)\n File \"/var/task/lgsf/councillors/scrapers.py\", line 50, in run\n for councillor_html in self.get_councillors():\n File \"/var/task/lgsf/councillors/scrapers.py\", line 151, in get_councillors\n container = self.get_list_container()\n File \"/var/task/lgsf/councillors/scrapers.py\", line 147, in get_list_container\n raise ValueError(\"More than one element selected\")\nValueError: More than one element selected\n","start":"2023-11-09 09:24:14.727951","end":"2023-11-09 09:24:19.247221","duration":4}}] diff --git a/index.html b/index.html index f83e512c5c..e1331950e7 100644 --- a/index.html +++ b/index.html @@ -49,14 +49,6 @@

All log runs

- -
-
- - -
-
-
@@ -129,6 +121,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -145,14 +145,6 @@

All log runs

- -
-
- - -
-
-
@@ -225,6 +217,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -241,14 +241,6 @@

All log runs

- -
-
- - -
-
-
@@ -321,6 +313,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -337,14 +337,6 @@

All log runs

- -
-
- - -
-
-
@@ -417,6 +409,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -433,14 +433,6 @@

All log runs

- -
-
- - -
-
-
@@ -513,6 +505,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -529,14 +529,6 @@

All log runs

- -
-
- - -
-
-
@@ -609,6 +601,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -625,14 +625,6 @@

All log runs

- -
-
- - -
-
-
@@ -705,6 +697,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -721,14 +721,6 @@

All log runs

- -
-
- - -
-
-
@@ -801,6 +793,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -817,14 +817,6 @@

All log runs

- -
-
- - -
-
-
@@ -897,6 +889,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -913,14 +913,6 @@

All log runs

- -
-
- - -
-
-
@@ -993,6 +985,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -1009,14 +1009,6 @@

All log runs

- -
-
- - -
-
-
@@ -1089,6 +1081,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -1105,14 +1105,6 @@

All log runs

- -
-
- - -
-
-
@@ -1185,6 +1177,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -1201,14 +1201,6 @@

All log runs

- -
-
- - -
-
-
@@ -1281,6 +1273,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -1297,14 +1297,6 @@

All log runs

- -
-
- - -
-
-
@@ -1377,6 +1369,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -1393,14 +1393,6 @@

All log runs

- -
-
- - -
-
-
@@ -1473,6 +1465,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -1489,14 +1489,6 @@

All log runs

- -
-
- - -
-
-
@@ -1569,6 +1561,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -1585,14 +1585,6 @@

All log runs

- -
-
- - -
-
-
@@ -1665,6 +1657,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -1681,14 +1681,6 @@

All log runs

- -
-
- - -
-
-
@@ -1761,6 +1753,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -1777,14 +1777,6 @@

All log runs

- -
-
- - -
-
-
@@ -1857,6 +1849,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -1873,14 +1873,6 @@

All log runs

- -
-
- - -
-
-
@@ -1953,6 +1945,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -1969,14 +1969,6 @@

All log runs

- -
-
- - -
-
-
@@ -2049,6 +2041,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -2065,14 +2065,6 @@

All log runs

- -
-
- - -
-
-
@@ -2145,6 +2137,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -2161,14 +2161,6 @@

All log runs

- -
-
- - -
-
-
@@ -2241,6 +2233,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -2257,14 +2257,6 @@

All log runs

- -
-
- - -
-
-
@@ -2337,6 +2329,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -2353,14 +2353,6 @@

All log runs

- -
-
- - -
-
-
@@ -2433,6 +2425,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -2449,14 +2449,6 @@

All log runs

- -
-
- - -
-
-
@@ -2529,6 +2521,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -2545,14 +2545,6 @@

All log runs

- -
-
- - -
-
-
@@ -2625,6 +2617,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -2641,14 +2641,6 @@

All log runs

- -
-
- - -
-
-
@@ -2721,6 +2713,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -2737,14 +2737,6 @@

All log runs

- -
-
- - -
-
-
@@ -2817,6 +2809,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -2833,14 +2833,6 @@

All log runs

- -
-
- - -
-
-
@@ -2913,6 +2905,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -2929,14 +2929,6 @@

All log runs

- -
-
- - -
-
-
@@ -3009,6 +3001,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -3025,14 +3025,6 @@

All log runs

- -
-
- - -
-
-
@@ -3105,6 +3097,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -3121,14 +3121,6 @@

All log runs

- -
-
- - -
-
-
@@ -3201,6 +3193,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -3217,14 +3217,6 @@

All log runs

- -
-
- - -
-
-
@@ -3297,6 +3289,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -3313,14 +3313,6 @@

All log runs

- -
-
- - -
-
-
@@ -3393,6 +3385,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -3409,14 +3409,6 @@

All log runs

- -
-
- - -
-
-
@@ -3489,6 +3481,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -3505,14 +3505,6 @@

All log runs

- -
-
- - -
-
-
@@ -3585,6 +3577,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -3601,14 +3601,6 @@

All log runs

- -
-
- - -
-
-
@@ -3681,6 +3673,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -3697,14 +3697,6 @@

All log runs

- -
-
- - -
-
-
@@ -3777,6 +3769,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -3793,14 +3793,6 @@

All log runs

- -
-
- - -
-
-
@@ -3873,6 +3865,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -3889,14 +3889,6 @@

All log runs

- -
-
- - -
-
-
@@ -3969,6 +3961,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -3985,14 +3985,6 @@

All log runs

- -
-
- - -
-
-
@@ -4065,6 +4057,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -4081,14 +4081,6 @@

All log runs

- -
-
- - -
-
-
@@ -4161,6 +4153,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -4177,14 +4177,6 @@

All log runs

- -
-
- - -
-
-
@@ -4257,6 +4249,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -4273,14 +4273,6 @@

All log runs

- -
-
- - -
-
-
@@ -4353,6 +4345,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -4369,14 +4369,6 @@

All log runs

- -
-
- - -
-
-
@@ -4449,6 +4441,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -4465,14 +4465,6 @@

All log runs

- -
-
- - -
-
-
@@ -4545,6 +4537,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -4561,14 +4561,6 @@

All log runs

- -
-
- - -
-
-
@@ -4641,6 +4633,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -4657,14 +4657,6 @@

All log runs

- -
-
- - -
-
-
@@ -4737,6 +4729,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -4753,14 +4753,6 @@

All log runs

- -
-
- - -
-
-
@@ -4833,6 +4825,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -4849,14 +4849,6 @@

All log runs

- -
-
- - -
-
-
@@ -4929,6 +4921,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -4945,14 +4945,6 @@

All log runs

- -
-
- - -
-
-
@@ -5025,6 +5017,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -5041,14 +5041,6 @@

All log runs

- -
-
- - -
-
-
@@ -5121,6 +5113,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -5137,14 +5137,6 @@

All log runs

- -
-
- - -
-
-
@@ -5217,6 +5209,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -5233,14 +5233,6 @@

All log runs

- -
-
- - -
-
-
@@ -5313,6 +5305,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -5329,14 +5329,6 @@

All log runs

- -
-
- - -
-
-
@@ -5409,6 +5401,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -5425,14 +5425,6 @@

All log runs

- -
-
- - -
-
-
@@ -5505,6 +5497,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -5521,14 +5521,6 @@

All log runs

- -
-
- - -
-
-
@@ -5601,6 +5593,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -5617,14 +5617,6 @@

All log runs

- -
-
- - -
-
-
@@ -5697,6 +5689,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -5713,14 +5713,6 @@

All log runs

- -
-
- - -
-
-
@@ -5793,6 +5785,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -5809,14 +5809,6 @@

All log runs

- -
-
- - -
-
-
@@ -5889,6 +5881,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -5905,14 +5905,6 @@

All log runs

- -
-
- - -
-
-
@@ -5985,6 +5977,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -6001,14 +6001,6 @@

All log runs

- -
-
- - -
-
-
@@ -6081,6 +6073,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -6097,14 +6097,6 @@

All log runs

- -
-
- - -
-
-
@@ -6177,6 +6169,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -6193,14 +6193,6 @@

All log runs

- -
-
- - -
-
-
@@ -6273,6 +6265,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -6289,14 +6289,6 @@

All log runs

- -
-
- - -
-
-
@@ -6369,6 +6361,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -6385,14 +6385,6 @@

All log runs

- -
-
- - -
-
-
@@ -6465,6 +6457,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -6481,14 +6481,6 @@

All log runs

- -
-
- - -
-
-
@@ -6561,6 +6553,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -6577,14 +6577,6 @@

All log runs

- -
-
- - -
-
-
@@ -6657,6 +6649,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -6673,14 +6673,6 @@

All log runs

- -
-
- - -
-
-
@@ -6753,6 +6745,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -6769,14 +6769,6 @@

All log runs

- -
-
- - -
-
-
@@ -6849,6 +6841,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -6865,14 +6865,6 @@

All log runs

- -
-
- - -
-
-
@@ -6945,6 +6937,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -6961,14 +6961,6 @@

All log runs

- -
-
- - -
-
-
@@ -7041,6 +7033,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -7057,14 +7057,6 @@

All log runs

- -
-
- - -
-
-
@@ -7137,6 +7129,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -7153,14 +7153,6 @@

All log runs

- -
-
- - -
-
-
@@ -7233,6 +7225,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -7249,14 +7249,6 @@

All log runs

- -
-
- - -
-
-
@@ -7329,6 +7321,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -7345,14 +7345,6 @@

All log runs

- -
-
- - -
-
-
@@ -7425,6 +7417,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -7441,14 +7441,6 @@

All log runs

- -
-
- - -
-
-
@@ -7521,6 +7513,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -7537,14 +7537,6 @@

All log runs

- -
-
- - -
-
-
@@ -7617,6 +7609,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -7633,14 +7633,6 @@

All log runs

- -
-
- - -
-
-
@@ -7713,6 +7705,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -7729,14 +7729,6 @@

All log runs

- -
-
- - -
-
-
@@ -7809,6 +7801,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -7825,14 +7825,6 @@

All log runs

- -
-
- - -
-
-
@@ -7905,6 +7897,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -7921,14 +7921,6 @@

All log runs

- -
-
- - -
-
-
@@ -8001,6 +7993,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -8017,14 +8017,6 @@

All log runs

- -
-
- - -
-
-
@@ -8097,6 +8089,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -8113,14 +8113,6 @@

All log runs

- -
-
- - -
-
-
@@ -8193,6 +8185,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -8209,14 +8209,6 @@

All log runs

- -
-
- - -
-
-
@@ -8289,6 +8281,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -8305,14 +8305,6 @@

All log runs

- -
-
- - -
-
-
@@ -8385,6 +8377,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -8401,14 +8401,6 @@

All log runs

- -
-
- - -
-
-
@@ -8481,6 +8473,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -8497,14 +8497,6 @@

All log runs

- -
-
- - -
-
-
@@ -8577,6 +8569,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -8593,14 +8593,6 @@

All log runs

- -
-
- - -
-
-
@@ -8673,6 +8665,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -8689,14 +8689,6 @@

All log runs

- -
-
- - -
-
-
@@ -8769,6 +8761,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -8785,14 +8785,6 @@

All log runs

- -
-
- - -
-
-
@@ -8865,6 +8857,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -8881,14 +8881,6 @@

All log runs

- -
-
- - -
-
-
@@ -8961,6 +8953,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -8977,14 +8977,6 @@

All log runs

- -
-
- - -
-
-
@@ -9057,6 +9049,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -9073,14 +9073,6 @@

All log runs

- -
-
- - -
-
-
@@ -9153,6 +9145,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -9169,14 +9169,6 @@

All log runs

- -
-
- - -
-
-
@@ -9249,6 +9241,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -9265,14 +9265,6 @@

All log runs

- -
-
- - -
-
-
@@ -9345,6 +9337,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -9361,14 +9361,6 @@

All log runs

- -
-
- - -
-
-
@@ -9441,6 +9433,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -9457,14 +9457,6 @@

All log runs

- -
-
- - -
-
-
@@ -9537,6 +9529,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -9553,14 +9553,6 @@

All log runs

- -
-
- - -
-
-
@@ -9633,6 +9625,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -9649,14 +9649,6 @@

All log runs

- -
-
- - -
-
-
@@ -9729,6 +9721,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -9745,14 +9745,6 @@

All log runs

- -
-
- - -
-
-
@@ -9825,6 +9817,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -9841,14 +9841,6 @@

All log runs

- -
-
- - -
-
-
@@ -9921,6 +9913,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -9937,14 +9937,6 @@

All log runs

- -
-
- - -
-
-
@@ -10017,6 +10009,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -10033,14 +10033,6 @@

All log runs

- -
-
- - -
-
-
@@ -10113,6 +10105,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -10129,14 +10129,6 @@

All log runs

- -
-
- - -
-
-
@@ -10209,6 +10201,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -10225,14 +10225,6 @@

All log runs

- -
-
- - -
-
-
@@ -10305,6 +10297,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -10321,14 +10321,6 @@

All log runs

- -
-
- - -
-
-
@@ -10401,6 +10393,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -10417,14 +10417,6 @@

All log runs

- -
-
- - -
-
-
@@ -10497,6 +10489,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -10513,14 +10513,6 @@

All log runs

- -
-
- - -
-
-
@@ -10593,6 +10585,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -10609,14 +10609,6 @@

All log runs

- -
-
- - -
-
-
@@ -10689,6 +10681,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -10705,14 +10705,6 @@

All log runs

- -
-
- - -
-
-
@@ -10785,6 +10777,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -10801,14 +10801,6 @@

All log runs

- -
-
- - -
-
-
@@ -10881,6 +10873,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -10897,14 +10897,6 @@

All log runs

- -
-
- - -
-
-
@@ -10977,6 +10969,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -10993,84 +10993,84 @@

All log runs

@@ -11089,14 +11089,6 @@

All log runs

- -
-
- - -
-
-
@@ -11169,6 +11161,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -11185,14 +11185,6 @@

All log runs

- -
-
- - -
-
-
@@ -11265,6 +11257,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -11281,14 +11281,6 @@

All log runs

- -
-
- - -
-
-
@@ -11361,6 +11353,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -11377,14 +11377,6 @@

All log runs

- -
-
- - -
-
-
@@ -11457,6 +11449,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -11473,14 +11473,6 @@

All log runs

- -
-
- - -
-
-
@@ -11553,6 +11545,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -11569,14 +11569,6 @@

All log runs

- -
-
- - -
-
-
@@ -11649,6 +11641,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -11665,14 +11665,6 @@

All log runs

- -
-
- - -
-
-
@@ -11745,6 +11737,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -11761,14 +11761,6 @@

All log runs

- -
-
- - -
-
-
@@ -11841,6 +11833,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -11857,14 +11857,6 @@

All log runs

- -
-
- - -
-
-
@@ -11937,6 +11929,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -11953,14 +11953,6 @@

All log runs

- -
-
- - -
-
-
@@ -12033,6 +12025,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -12049,14 +12049,6 @@

All log runs

- -
-
- - -
-
-
@@ -12129,6 +12121,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -12145,83 +12145,83 @@

All log runs

- +
- -
-
- - +
- +
- +
- +
- -
+
+
- +
- +
- -
+
+
- +
- +
- +
- -
+
+
- -
+
+
- +
- -
+
+
- -
+
+
- -
+
+
- + +
+
+ +
@@ -12241,14 +12241,6 @@

All log runs

- -
-
- - -
-
-
@@ -12321,6 +12313,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -12337,14 +12337,6 @@

All log runs

- -
-
- - -
-
-
@@ -12417,6 +12409,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -12433,14 +12433,6 @@

All log runs

- -
-
- - -
-
-
@@ -12513,6 +12505,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -12529,14 +12529,6 @@

All log runs

- -
-
- - -
-
-
@@ -12609,6 +12601,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -12625,14 +12625,6 @@

All log runs

- -
-
- - -
-
-
@@ -12705,6 +12697,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -12721,14 +12721,6 @@

All log runs

- -
-
- - -
-
-
@@ -12801,6 +12793,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -12817,14 +12817,6 @@

All log runs

- -
-
- - -
-
-
@@ -12897,6 +12889,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -12913,14 +12913,6 @@

All log runs

- -
-
- - -
-
-
@@ -12993,6 +12985,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -13009,14 +13009,6 @@

All log runs

- -
-
- - -
-
-
@@ -13089,6 +13081,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -13105,14 +13105,6 @@

All log runs

- -
-
- - -
-
-
@@ -13185,6 +13177,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -13201,14 +13201,6 @@

All log runs

- -
-
- - -
-
-
@@ -13281,6 +13273,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -13297,14 +13297,6 @@

All log runs

- -
-
- - -
-
-
@@ -13377,6 +13369,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -13393,14 +13393,6 @@

All log runs

- -
-
- - -
-
-
@@ -13473,6 +13465,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -13489,14 +13489,6 @@

All log runs

- -
-
- - -
-
-
@@ -13569,6 +13561,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -13585,14 +13585,6 @@

All log runs

- -
-
- - -
-
-
@@ -13665,30 +13657,30 @@

All log runs

+ +
+
+ + +
+
+
- + HIN - OK + ERROR
- -
-
- - -
-
-
@@ -13761,6 +13753,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -13777,14 +13777,6 @@

All log runs

- -
-
- - -
-
-
@@ -13857,6 +13849,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -13873,14 +13873,6 @@

All log runs

- -
-
- - -
-
-
@@ -13953,6 +13945,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -13969,14 +13969,6 @@

All log runs

- -
-
- - -
-
-
@@ -14049,6 +14041,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -14065,14 +14065,6 @@

All log runs

- -
-
- - -
-
-
@@ -14145,6 +14137,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -14161,14 +14161,6 @@

All log runs

- -
-
- - -
-
-
@@ -14241,6 +14233,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -14257,14 +14257,6 @@

All log runs

- -
-
- - -
-
-
@@ -14337,6 +14329,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -14353,14 +14353,6 @@

All log runs

- -
-
- - -
-
-
@@ -14433,6 +14425,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -14449,14 +14449,6 @@

All log runs

- -
-
- - -
-
-
@@ -14529,6 +14521,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -14545,14 +14545,6 @@

All log runs

- -
-
- - -
-
-
@@ -14625,6 +14617,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -14641,14 +14641,6 @@

All log runs

- -
-
- - -
-
-
@@ -14721,6 +14713,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -14737,14 +14737,6 @@

All log runs

- -
-
- - -
-
-
@@ -14817,6 +14809,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -14833,14 +14833,6 @@

All log runs

- -
-
- - -
-
-
@@ -14913,6 +14905,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -14929,14 +14929,6 @@

All log runs

- -
-
- - -
-
-
@@ -15009,6 +15001,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -15025,14 +15025,6 @@

All log runs

- -
-
- - -
-
-
@@ -15105,6 +15097,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -15121,14 +15121,6 @@

All log runs

- -
-
- - -
-
-
@@ -15201,6 +15193,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -15217,14 +15217,6 @@

All log runs

- -
-
- - -
-
-
@@ -15297,6 +15289,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -15313,14 +15313,6 @@

All log runs

- -
-
- - -
-
-
@@ -15393,6 +15385,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -15409,14 +15409,6 @@

All log runs

- -
-
- - -
-
-
@@ -15489,6 +15481,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -15505,14 +15505,6 @@

All log runs

- -
-
- - -
-
-
@@ -15585,6 +15577,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -15601,14 +15601,6 @@

All log runs

- -
-
- - -
-
-
@@ -15681,6 +15673,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -15697,14 +15697,6 @@

All log runs

- -
-
- - -
-
-
@@ -15777,6 +15769,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -15793,14 +15793,6 @@

All log runs

- -
-
- - -
-
-
@@ -15873,6 +15865,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -15889,14 +15889,6 @@

All log runs

- -
-
- - -
-
-
@@ -15969,6 +15961,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -15985,14 +15985,6 @@

All log runs

- -
-
- - -
-
-
@@ -16065,6 +16057,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -16081,14 +16081,6 @@

All log runs

- -
-
- - -
-
-
@@ -16161,6 +16153,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -16177,14 +16177,6 @@

All log runs

- -
-
- - -
-
-
@@ -16257,6 +16249,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -16273,14 +16273,6 @@

All log runs

- -
-
- - -
-
-
@@ -16353,6 +16345,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -16369,14 +16369,6 @@

All log runs

- -
-
- - -
-
-
@@ -16449,6 +16441,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -16465,14 +16465,6 @@

All log runs

- -
-
- - -
-
-
@@ -16545,6 +16537,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -16561,14 +16561,6 @@

All log runs

- -
-
- - -
-
-
@@ -16641,6 +16633,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -16657,14 +16657,6 @@

All log runs

- -
-
- - -
-
-
@@ -16737,6 +16729,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -16753,14 +16753,6 @@

All log runs

- -
-
- - -
-
-
@@ -16833,6 +16825,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -16849,14 +16849,6 @@

All log runs

- -
-
- - -
-
-
@@ -16929,30 +16921,30 @@

All log runs

+ +
+
+ + +
+
+
- + LUT - ERROR + OK
- -
-
- - -
-
-
@@ -17025,6 +17017,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -17041,14 +17041,6 @@

All log runs

- -
-
- - -
-
-
@@ -17121,6 +17113,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -17137,14 +17137,6 @@

All log runs

- -
-
- - -
-
-
@@ -17217,6 +17209,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -17233,14 +17233,6 @@

All log runs

- -
-
- - -
-
-
@@ -17313,6 +17305,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -17329,14 +17329,6 @@

All log runs

- -
-
- - -
-
-
@@ -17409,30 +17401,30 @@

All log runs

+ +
+
+ + +
+
+
- + MDB - ERROR + OK
- -
-
- - -
-
-
@@ -17505,6 +17497,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -17521,14 +17521,6 @@

All log runs

- -
-
- - -
-
-
@@ -17601,6 +17593,14 @@

All log runs

+ +
+
+ + +
+
+
@@ -17617,14 +17617,6 @@

All log runs

- -
-
- - -
-
-
@@ -17697,30 +17689,30 @@

All log runs

+ +
+
+ + +
+
+
- + MEL - OK + ERROR
- -
-
- - -
-
-
@@ -19713,30 +19705,30 @@

All log runs

- NGM OK
+ NGM ERROR
@@ -21345,30 +21337,30 @@

All log runs

- OXF OK
+ OXF ERROR
diff --git a/logbooks/ABC/index.html b/logbooks/ABC/index.html
index 3ddddc8de7..55a4cae7da 100644
--- a/logbooks/ABC/index.html
+++ b/logbooks/ABC/index.html
@@ -37,6 +37,106 @@
+

2023-11-09

+ Duration: 6 seconds
+ Start: 2023-11-09 09:51:28.452090
+ End: 2023-11-09 09:51:35.383905
+ Status code: 0
+ Error:

Run log

+
[09:51:28] Fetching Scraper for: ABC                              handlers.py:23
+           Begin attempting to scrape: ABC                        handlers.py:27
+           Deleting existing data...                                 base.py:239
+[09:51:29] Getting all files in Councillors...                       base.py:191
+           Getting all files in Councillors/json...                  base.py:191
+           ...found 16 files in Councillors/json                     base.py:207
+           Getting all files in Councillors/raw...                   base.py:191
+           ...found 16 files in Councillors/raw                      base.py:207
+           ...found 33 files in Councillors                          base.py:207
+           Deleting batch no. 1 consisting of 33 files               base.py:216
+[09:51:30] ...data deleted.                                          base.py:246
+           Scraping from                                              base.py:42
+           https://www.armaghbanbridgecraigavon.gov.uk/councillors/             
+[09:51:34] Committing batch 1 consisting of 32 files                 base.py:274
+[09:51:35] Finished attempting to scrape: ABC                        base.py:324
+

2023-11-08

+ Duration: 6 seconds
+ Start: 2023-11-08 08:44:06.787696
+ End: 2023-11-08 08:44:13.778074
+ Status code: 0
+ Error:

Run log

+
[08:44:06] Fetching Scraper for: ABC                              handlers.py:23
+           Begin attempting to scrape: ABC                        handlers.py:27
+[08:44:07] Deleting existing data...                                 base.py:239
+           Getting all files in Councillors...                       base.py:191
+           Getting all files in Councillors/json...                  base.py:191
+[08:44:08] ...found 16 files in Councillors/json                     base.py:207
+           Getting all files in Councillors/raw...                   base.py:191
+           ...found 16 files in Councillors/raw                      base.py:207
+           ...found 33 files in Councillors                          base.py:207
+           Deleting batch no. 1 consisting of 33 files               base.py:216
+[08:44:09] ...data deleted.                                          base.py:246
+           Scraping from                                              base.py:42
+           https://www.armaghbanbridgecraigavon.gov.uk/councillors/             
+[08:44:12] Committing batch 1 consisting of 32 files                 base.py:274
+[08:44:13] Finished attempting to scrape: ABC                        base.py:324
+

2023-11-07

@@ -934,106 +1034,6 @@

Run log


2023-10-20

- Duration: 6 seconds
- Start: 2023-10-20 08:47:43.344673
- End: 2023-10-20 08:47:50.134775
- Status code: 0
- Error:

Run log

-
[08:47:43] Fetching Scraper for: ABC                              handlers.py:23
-           Begin attempting to scrape: ABC                        handlers.py:27
-           Deleting existing data...                                 base.py:239
-[08:47:44] Getting all files in Councillors...                       base.py:191
-           Getting all files in Councillors/json...                  base.py:191
-           ...found 16 files in Councillors/json                     base.py:207
-           Getting all files in Councillors/raw...                   base.py:191
-           ...found 16 files in Councillors/raw                      base.py:207
-           ...found 33 files in Councillors                          base.py:207
-           Deleting batch no. 1 consisting of 33 files               base.py:216
-[08:47:45] ...data deleted.                                          base.py:246
-           Scraping from                                              base.py:42
-           https://www.armaghbanbridgecraigavon.gov.uk/councillors/             
-[08:47:49] Committing batch 1 consisting of 32 files                 base.py:274
-[08:47:50] Finished attempting to scrape: ABC                        base.py:324
-

2023-10-19

- Duration: 8 seconds
- Start: 2023-10-19 09:47:53.162222
- End: 2023-10-19 09:48:01.812534
- Status code: 0
- Error:

Run log

-
[09:47:53] Fetching Scraper for: ABC                              handlers.py:23
-           Begin attempting to scrape: ABC                        handlers.py:27
-[09:47:55] Deleting existing data...                                 base.py:239
-           Getting all files in Councillors...                       base.py:191
-           Getting all files in Councillors/json...                  base.py:191
-           ...found 16 files in Councillors/json                     base.py:207
-           Getting all files in Councillors/raw...                   base.py:191
-[09:47:56] ...found 16 files in Councillors/raw                      base.py:207
-           ...found 33 files in Councillors                          base.py:207
-           Deleting batch no. 1 consisting of 33 files               base.py:216
-           ...data deleted.                                          base.py:246
-           Scraping from                                              base.py:42
-           https://www.armaghbanbridgecraigavon.gov.uk/councillors/             
-[09:48:00] Committing batch 1 consisting of 32 files                 base.py:274
-[09:48:01] Finished attempting to scrape: ABC                        base.py:324
-