@@ -1,4 +1,4 @@
-#vim: fileencoding=utf-8
+# vim: fileencoding=utf-8
 
 """
@@ -16,6 +16,7 @@ from __future__ import print_function
 from gevent.pywsgi import WSGIServer
 from gevent.monkey import patch_all
 patch_all()
 
 # pylint: disable=wrong-import-position,wrong-import-order
@@ -29,16 +30,19 @@ import requests
 import cyrtranslit
 
 from flask import Flask, request
 APP = Flask(__name__)
 
 MYDIR = os.path.abspath(
     os.path.dirname(os.path.dirname('__file__')))
 sys.path.append("%s/lib/" % MYDIR)
 
-from globals import PROXY_CACHEDIR, PROXY_HOST, PROXY_PORT, USE_METNO, USER_AGENT, MISSING_TRANSLATION_LOG
+from globals import PROXY_CACHEDIR, PROXY_HOST, PROXY_PORT, AVAILABLE_DATA_SOURCES, DEFAULT_DATA_SOURCE, USER_AGENT, \
+    MISSING_TRANSLATION_LOG
 from metno import create_standard_json_from_metno, metno_request
 from translations import PROXY_LANGS
 # pylint: enable=wrong-import-position
 
 
 def is_testmode():
@@ -46,6 +50,7 @@ def is_testmode():
     return "WTTRIN_TEST" in os.environ
 
 
 def load_translations():
     """
     load all translations
@@ -70,15 +75,16 @@ def load_translations():
                 translation[orig.lower()] = trans
         translations[lang] = translation
     return translations
 
 
 TRANSLATIONS = load_translations()
 
 
-def _is_metno():
-    return USE_METNO
-
-
-def _find_srv_for_query(path, query): # pylint: disable=unused-argument
-    if _is_metno():
-        return 'https://api.met.no'
-    return 'http://api.worldweatheronline.com'
+# TODO: Add combined datastream to stop the program using the first item in AVAILABLE_DATA_SOURCES.
+def _get_data_source():
+    if len(AVAILABLE_DATA_SOURCES) == 0:
+        return DEFAULT_DATA_SOURCE["url"]
+    return AVAILABLE_DATA_SOURCES[0]["url"]
 
 
 def _cache_file(path, query):
     """Return cache file name for specified `path` and `query`
@@ -90,9 +96,9 @@ def _cache_file(path, query):
     digest = hashlib.sha1(("%s %s" % (path, query)).encode("utf-8")).hexdigest()
     digest_number = ord(digest[0].upper())
-    expiry_interval = 60*(digest_number+90)
+    expiry_interval = 60 * (digest_number + 90)
 
-    timestamp = "%010d" % (int(time.time())//expiry_interval*expiry_interval)
+    timestamp = "%010d" % (int(time.time()) // expiry_interval * expiry_interval)
     filename = os.path.join(PROXY_CACHEDIR, timestamp, path, query)
 
     return filename
@@ -105,10 +111,11 @@ def _load_content_and_headers(path, query):
     cache_file = _cache_file(path, query)
     try:
         return (open(cache_file, 'r').read(),
-                json.loads(open(cache_file+".headers", 'r').read()))
+                json.loads(open(cache_file + ".headers", 'r').read()))
     except IOError:
         return None, None
 
 
 def _touch_empty_file(path, query):
     cache_file = _cache_file(path, query)
     cache_dir = os.path.dirname(cache_file)
@@ -116,6 +123,7 @@ def _touch_empty_file(path, query):
         os.makedirs(cache_dir)
     open(cache_file, 'w').write("")
 
 
 def _save_content_and_headers(path, query, content, headers):
     cache_file = _cache_file(path, query)
     cache_dir = os.path.dirname(cache_file)
@@ -124,6 +132,7 @@ def _save_content_and_headers(path, query, content, headers):
     open(cache_file + ".headers", 'w').write(json.dumps(headers))
     open(cache_file, 'wb').write(content)
 
 
 def translate(text, lang):
     """
     Translate `text` into `lang`.
@@ -133,7 +142,7 @@ def translate(text, lang):
     def _log_unknown_translation(lang, text):
         with open(MISSING_TRANSLATION_LOG % lang, "a") as f_missing_translation:
-            f_missing_translation.write(text+"\n")
+            f_missing_translation.write(text + "\n")
 
     if "," in text:
         terms = text.split(",")
@@ -151,15 +160,18 @@ def translate(text, lang):
     translated = TRANSLATIONS.get(lang, {}).get(text.lower(), text)
     return translated
 
 
 def cyr(to_translate):
     """
     Transliterate `to_translate` from latin into cyrillic
     """
     return cyrtranslit.to_cyrillic(to_translate)
 
 
 def _patch_greek(original):
     return original.replace(u"Ηλιόλουστη/ο", u"Ηλιόλουστη")
 
 
 def add_translations(content, lang):
     """
     Add `lang` translation to `content` (JSON)
@@ -171,7 +183,7 @@ def add_translations(content, lang):
     languages_to_translate = TRANSLATIONS.keys()
     try:
         d = json.loads(content) # pylint: disable=invalid-name
     except (ValueError, TypeError) as exception:
         print("---")
         print(exception)
@@ -180,7 +192,7 @@ def add_translations(content, lang):
     try:
         weather_condition = d['data']['current_condition'
                                       ][0]['weatherDesc'][0]['value'].capitalize()
         d['data']['current_condition'][0]['weatherDesc'][0]['value'] = \
             weather_condition
         if lang in languages_to_translate:
@@ -189,22 +201,22 @@ def add_translations(content, lang):
         elif lang == 'sr':
             d['data']['current_condition'][0]['lang_%s' % lang] = \
                 [{'value': cyr(
-                    d['data']['current_condition'][0]['lang_%s' % lang][0]['value']\
+                    d['data']['current_condition'][0]['lang_%s' % lang][0]['value'] \
                     )}]
         elif lang == 'el':
             d['data']['current_condition'][0]['lang_%s' % lang] = \
                 [{'value': _patch_greek(
-                    d['data']['current_condition'][0]['lang_%s' % lang][0]['value']\
+                    d['data']['current_condition'][0]['lang_%s' % lang][0]['value'] \
                     )}]
         elif lang == 'sr-lat':
             d['data']['current_condition'][0]['lang_%s' % lang] = \
-                [{'value':d['data']['current_condition'][0]['lang_sr'][0]['value']\
+                [{'value': d['data']['current_condition'][0]['lang_sr'][0]['value'] \
                 }]
 
         fixed_weather = []
         for w in d['data']['weather']: # pylint: disable=invalid-name
             fixed_hourly = []
             for h in w['hourly']: # pylint: disable=invalid-name
                 weather_condition = h['weatherDesc'][0]['value']
                 if lang in languages_to_translate:
                     h['lang_%s' % lang] = \
@@ -228,11 +240,12 @@ def add_translations(content, lang):
         print(exception)
     return content
 
 
 def _fetch_content_and_headers(path, query_string, **kwargs):
     content, headers = _load_content_and_headers(path, query_string)
 
     if content is None:
-        srv = _find_srv_for_query(path, query_string)
+        srv = _get_data_source()
         url = '%s/%s?%s' % (srv, path, query_string)
 
         attempts = 10
@@ -274,13 +287,13 @@ def proxy(path):
     query_string = query_string.replace('lang=None', 'lang=en')
     content = ""
     headers = ""
-    if _is_metno():
+    if _get_data_source():
         path, query, days = metno_request(path, query_string)
         if USER_AGENT == '':
             raise ValueError('User agent must be set to adhere to metno ToS: https://api.met.no/doc/TermsOfService')
         content, headers = _fetch_content_and_headers(path, query, headers={
             'User-Agent': USER_AGENT
         })
         content = create_standard_json_from_metno(content, days)
     else:
         # WWO tweaks
@@ -292,9 +305,10 @@ def proxy(path):
     return content, 200, headers
 
 
 if __name__ == "__main__":
-    #app.run(host='0.0.0.0', port=5001, debug=False)
-    #app.debug = True
+    # app.run(host='0.0.0.0', port=5001, debug=False)
+    # app.debug = True
     if len(sys.argv) == 1:
         bind_addr = "0.0.0.0"
     SERVER = WSGIServer((bind_addr, PROXY_PORT), APP)
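
Note on the new configuration: _get_data_source() relies on AVAILABLE_DATA_SOURCES and
DEFAULT_DATA_SOURCE from globals.py, which are not shown in this diff. A minimal sketch of the
shapes the function appears to expect follows; the imported names come from the diff itself, but
the example values and any keys other than "url" are assumptions, not the project's actual
settings:

    # globals.py -- hypothetical sketch; only the "url" key is read by _get_data_source()
    DEFAULT_DATA_SOURCE = {"url": "http://api.worldweatheronline.com"}
    AVAILABLE_DATA_SOURCES = [
        {"url": "https://api.met.no"},
    ]

With values like these, _get_data_source() falls back to DEFAULT_DATA_SOURCE["url"] when no data
sources are configured and otherwise uses the first entry of AVAILABLE_DATA_SOURCES, which is the
behaviour the TODO comment above refers to.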