diff options
author | Olga Khlopkova <o.khlopkova@corp.mail.ru> | 2021-01-15 17:23:19 +0300 |
---|---|---|
committer | Maksim Andrianov <maksimandrianov1@gmail.com> | 2021-01-15 17:26:47 +0300 |
commit | 355185ecfa299ed4b2aea60a03871fbdfdbb98a1 (patch) | |
tree | b595706814704094f3da159fcca7c0bfa6fd0f8d | |
parent | 0b864576479d39ef13f72dde1a5f9f40b417529c (diff) |
[transit] raise_for_status().
-rw-r--r-- | tools/python/transit/gtfs/download_gtfs.py | 30 |
1 file changed, 13 insertions, 17 deletions
diff --git a/tools/python/transit/gtfs/download_gtfs.py b/tools/python/transit/gtfs/download_gtfs.py index 8f883d086b..94adadce6c 100644 --- a/tools/python/transit/gtfs/download_gtfs.py +++ b/tools/python/transit/gtfs/download_gtfs.py @@ -50,15 +50,7 @@ def parse_transitland_page(url): while retries > 0: try: with requests.get(url) as response: - if response.status_code != 200: - logger.error(f"Failed loading feeds: {response.status_code}") - if response.status_code == 429: - logger.error("Too many requests.") - time.sleep(MAX_SLEEP_TIMEOUT_S) - else: - time.sleep(AVG_SLEEP_TIMEOUT_S) - retries -= 1 - continue + response.raise_for_status() data = json.loads(response.text) if "feeds" in data: @@ -69,9 +61,15 @@ def parse_transitland_page(url): next_page = data["meta"]["next"] if "next" in data["meta"] else "" return gtfs_feeds_urls, next_page except requests.exceptions.RequestException as ex: - logger.error(f"Exception {ex} for url {url}") + logger.error( + f"Exception {ex} while parsing Transitland url {url} with code {response.status_code}" + ) + if response.status_code == 429: + logger.error("Too many requests.") + time.sleep(MAX_SLEEP_TIMEOUT_S) + else: + time.sleep(AVG_SLEEP_TIMEOUT_S) retries -= 1 - time.sleep(AVG_SLEEP_TIMEOUT_S) return [], "" @@ -96,11 +94,7 @@ def load_gtfs_feed_zip(path, url): while retries > 0: try: with requests.get(url, stream=True) as response: - if response.status_code != 200: - logger.error(f"HTTP code {response.status_code} loading gtfs {url}") - retries -= 1 - time.sleep(MAX_SLEEP_TIMEOUT_S) - continue + response.raise_for_status() if not extract_to_path(response.content, path): retries -= 1 @@ -110,7 +104,9 @@ def load_gtfs_feed_zip(path, url): return True except requests.exceptions.RequestException as ex: - logger.error(f"Exception {ex} for url {url}") + logger.error( + f"Exception {ex} downloading zip from url {url}, HTTP code {response.status_code}" + ) retries -= 1 time.sleep(AVG_SLEEP_TIMEOUT_S) |