Handle more unreachable states

nemunaire 2022-08-19 16:58:30 +02:00
parent 8b5e6c2a7c
commit 89067be592
2 changed files with 15 additions and 3 deletions

@@ -2,6 +2,7 @@ from datetime import datetime, timedelta, timezone
 import base64
 import json
 import os
+import urllib.error
 import urllib.parse
 import urllib.request
 import re
@@ -47,6 +48,8 @@ class SNCFAPI:
                        fd.write(f.read())
            except ConnectionResetError:
                pass
+           except urllib.error.URLError:
+               pass

        # Retrieve cached data
        res = {}
@@ -73,6 +76,8 @@ class SNCFAPI:
                        fd.write(f.read())
            except ConnectionResetError:
                pass
+           except urllib.error.URLError:
+               pass

        # Retrieve cached data
        res = {}
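Both SNCFAPI hunks extend an existing try/except that falls back to previously cached data when the download fails. For reference, urllib.request.urlopen raises urllib.error.URLError for network-level failures (DNS errors, unreachable or refused hosts; urllib.error.HTTPError is a subclass), while a peer closing the socket during f.read() can still surface as ConnectionResetError, so both clauses are useful. Below is a minimal sketch of the pattern, using a hypothetical refresh_cache helper with stand-in url and cache_path arguments rather than the real classes' attributes:

import urllib.error
import urllib.request

def refresh_cache(url, cache_path):
    """Best-effort refresh: on any network failure, keep the stale cache file."""
    try:
        with urllib.request.urlopen(url) as f:
            with open(cache_path, 'wb') as fd:
                fd.write(f.read())
    except ConnectionResetError:
        # Peer reset the connection, typically mid-transfer during f.read().
        pass
    except urllib.error.URLError:
        # DNS failure, unreachable host, HTTP error status (HTTPError
        # subclasses URLError), etc.: fall back to the cached copy.
        pass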

@@ -1,6 +1,7 @@
 from datetime import datetime, timedelta, timezone
 import json
 import os
+import urllib.error
 import urllib.parse
 import urllib.request
@@ -28,9 +29,15 @@ class DarkSkyAPI:
         if statinfo is None or datetime.fromtimestamp(statinfo.st_mtime, tz=timezone.utc) + timedelta(hours=1) < datetime.now(tz=timezone.utc):
             # Do the request and save it
-            with urllib.request.urlopen(self.baseurl + "/" + str(self.apikey) + "/" + gps + "?" + "&".join([opt+"="+self.opts[opt] for opt in self.opts])) as f:
-                with open(self._cached_file % gps, 'wb') as fd:
-                    fd.write(f.read())
+            try:
+                with urllib.request.urlopen(self.baseurl + "/" + str(self.apikey) + "/" + gps + "?" + "&".join([opt+"="+self.opts[opt] for opt in self.opts])) as f:
+                    with open(self._cached_file % gps, 'wb') as fd:
+                        fd.write(f.read())
+            except ConnectionResetError:
+                pass
+            except urllib.error.URLError:
+                pass

         # Retrieve cached data
         res = {}
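In DarkSkyAPI the request was previously unguarded, so this hunk wraps the whole download in the same try/except before the method falls through to the cached data. A hedged usage sketch of the resulting flow, reusing the hypothetical refresh_cache helper above and assuming the cache file holds JSON:

import json
import os

def get_cached_forecast(url, cache_path):
    # Attempt a refresh; refresh_cache swallows network failures, so an
    # unreachable API leaves the previous cache file untouched.
    refresh_cache(url, cache_path)

    # Retrieve cached data, which may be stale, or absent if no request
    # has ever succeeded.
    if not os.path.exists(cache_path):
        return None
    with open(cache_path) as fd:
        return json.load(fd)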