Fix formatting of bin/proxy.py

pull/1062/head
Igor Chubin 1 month ago
parent 312428bb6e
commit 08bef9006d

@@ -1,4 +1,4 @@
-#vim: fileencoding=utf-8
+# vim: fileencoding=utf-8
 """
@@ -16,6 +16,7 @@ from __future__ import print_function
 from gevent.pywsgi import WSGIServer
 from gevent.monkey import patch_all
 patch_all()
 # pylint: disable=wrong-import-position,wrong-import-order
@@ -29,27 +30,37 @@ import requests
 import cyrtranslit
 from flask import Flask, request
 APP = Flask(__name__)
-MYDIR = os.path.abspath(
-    os.path.dirname(os.path.dirname('__file__')))
+MYDIR = os.path.abspath(os.path.dirname(os.path.dirname("__file__")))
 sys.path.append("%s/lib/" % MYDIR)
 import proxy_log
 import globals
-from globals import PROXY_CACHEDIR, PROXY_HOST, PROXY_PORT, USE_METNO, USER_AGENT, MISSING_TRANSLATION_LOG
+from globals import (
+    PROXY_CACHEDIR,
+    PROXY_HOST,
+    PROXY_PORT,
+    USE_METNO,
+    USER_AGENT,
+    MISSING_TRANSLATION_LOG,
+)
 from metno import create_standard_json_from_metno, metno_request
 from translations import PROXY_LANGS
 # pylint: enable=wrong-import-position
 proxy_logger = proxy_log.LoggerWWO(globals.PROXY_LOG_ACCESS, globals.PROXY_LOG_ERRORS)
 def is_testmode():
     """Server is running in the wttr.in test mode"""
     return "WTTRIN_TEST" in os.environ
 def load_translations():
     """
     load all translations
@@ -57,32 +68,37 @@ def load_translations():
     translations = {}
     for f_name in PROXY_LANGS:
-        f_name = 'share/translations/%s.txt' % f_name
+        f_name = "share/translations/%s.txt" % f_name
         translation = {}
-        lang = f_name.split('/')[-1].split('.', 1)[0]
+        lang = f_name.split("/")[-1].split(".", 1)[0]
         with open(f_name, "r") as f_file:
             for line in f_file:
-                if ':' not in line:
+                if ":" not in line:
                     continue
-                if line.count(':') == 3:
-                    _, trans, orig, _ = line.strip().split(':', 4)
+                if line.count(":") == 3:
+                    _, trans, orig, _ = line.strip().split(":", 4)
                 else:
-                    _, trans, orig = line.strip().split(':', 3)
+                    _, trans, orig = line.strip().split(":", 3)
                 trans = trans.strip()
                 orig = orig.strip()
                 translation[orig.lower()] = trans
         translations[lang] = translation
     return translations
 TRANSLATIONS = load_translations()
 def _is_metno():
     return USE_METNO
-def _find_srv_for_query(path, query): # pylint: disable=unused-argument
+def _find_srv_for_query(path, query):  # pylint: disable=unused-argument
     if _is_metno():
-        return 'https://api.met.no'
-    return 'http://api.worldweatheronline.com'
+        return "https://api.met.no"
+    return "http://api.worldweatheronline.com"
 def _cache_file(path, query):
     """Return cache file name for specified `path` and `query`
@@ -94,9 +110,9 @@ def _cache_file(path, query):
     digest = hashlib.sha1(("%s %s" % (path, query)).encode("utf-8")).hexdigest()
     digest_number = ord(digest[0].upper())
-    expiry_interval = 60*(digest_number+180)
-    timestamp = "%010d" % (int(time.time())//expiry_interval*expiry_interval)
+    expiry_interval = 60 * (digest_number + 180)
+    timestamp = "%010d" % (int(time.time()) // expiry_interval * expiry_interval)
     filename = os.path.join(PROXY_CACHEDIR, timestamp, path, query)
     return filename
@@ -108,25 +124,30 @@ def _load_content_and_headers(path, query):
     else:
         cache_file = _cache_file(path, query)
         try:
-            return (open(cache_file, 'r').read(),
-                    json.loads(open(cache_file+".headers", 'r').read()))
+            return (
+                open(cache_file, "r").read(),
+                json.loads(open(cache_file + ".headers", "r").read()),
+            )
         except IOError:
             return None, None
 def _touch_empty_file(path, query):
     cache_file = _cache_file(path, query)
     cache_dir = os.path.dirname(cache_file)
     if not os.path.exists(cache_dir):
         os.makedirs(cache_dir)
-    open(cache_file, 'w').write("")
+    open(cache_file, "w").write("")
 def _save_content_and_headers(path, query, content, headers):
     cache_file = _cache_file(path, query)
     cache_dir = os.path.dirname(cache_file)
     if not os.path.exists(cache_dir):
         os.makedirs(cache_dir)
-    open(cache_file + ".headers", 'w').write(json.dumps(headers))
-    open(cache_file, 'wb').write(content)
+    open(cache_file + ".headers", "w").write(json.dumps(headers))
+    open(cache_file, "wb").write(content)
 def translate(text, lang):
     """
@@ -137,7 +158,7 @@ def translate(text, lang):
     def _log_unknown_translation(lang, text):
         with open(MISSING_TRANSLATION_LOG % lang, "a") as f_missing_translation:
-            f_missing_translation.write(text+"\n")
+            f_missing_translation.write(text + "\n")
     if "," in text:
         terms = text.split(",")
@@ -155,14 +176,17 @@ def translate(text, lang):
     translated = TRANSLATIONS.get(lang, {}).get(text.lower(), text)
     return translated
 def cyr(to_translate):
     """
     Transliterate `to_translate` from latin into cyrillic
     """
     return cyrtranslit.to_cyrillic(to_translate)
 def _patch_greek(original):
-    return original.replace(u"Ηλιόλουστη/ο", u"Ηλιόλουστη")
+    return original.replace("Ηλιόλουστη/ο", "Ηλιόλουστη")
 def add_translations(content, lang):
     """
@@ -175,7 +199,7 @@ def add_translations(content, lang):
     languages_to_translate = TRANSLATIONS.keys()
     try:
-        d = json.loads(content) # pylint: disable=invalid-name
+        d = json.loads(content)  # pylint: disable=invalid-name
     except (ValueError, TypeError) as exception:
         print("---")
         print(exception)
@@ -183,55 +207,65 @@ def add_translations(content, lang):
         return {}
     try:
-        weather_condition = d['data']['current_condition'
-                                      ][0]['weatherDesc'][0]['value'].capitalize()
-        d['data']['current_condition'][0]['weatherDesc'][0]['value'] = \
-            weather_condition
+        weather_condition = d["data"]["current_condition"][0]["weatherDesc"][0][
+            "value"
+        ].capitalize()
+        d["data"]["current_condition"][0]["weatherDesc"][0]["value"] = weather_condition
         if lang in languages_to_translate:
-            d['data']['current_condition'][0]['lang_%s' % lang] = \
-                [{'value': translate(weather_condition, lang)}]
-        elif lang == 'sr':
-            d['data']['current_condition'][0]['lang_%s' % lang] = \
-                [{'value': cyr(
-                    d['data']['current_condition'][0]['lang_%s' % lang][0]['value']\
-                    )}]
-        elif lang == 'el':
-            d['data']['current_condition'][0]['lang_%s' % lang] = \
-                [{'value': _patch_greek(
-                    d['data']['current_condition'][0]['lang_%s' % lang][0]['value']\
-                    )}]
-        elif lang == 'sr-lat':
-            d['data']['current_condition'][0]['lang_%s' % lang] = \
-                [{'value':d['data']['current_condition'][0]['lang_sr'][0]['value']\
-                }]
+            d["data"]["current_condition"][0]["lang_%s" % lang] = [
+                {"value": translate(weather_condition, lang)}
+            ]
+        elif lang == "sr":
+            d["data"]["current_condition"][0]["lang_%s" % lang] = [
+                {
+                    "value": cyr(
+                        d["data"]["current_condition"][0]["lang_%s" % lang][0]["value"]
+                    )
+                }
+            ]
+        elif lang == "el":
+            d["data"]["current_condition"][0]["lang_%s" % lang] = [
+                {
+                    "value": _patch_greek(
+                        d["data"]["current_condition"][0]["lang_%s" % lang][0]["value"]
+                    )
+                }
+            ]
+        elif lang == "sr-lat":
+            d["data"]["current_condition"][0]["lang_%s" % lang] = [
+                {"value": d["data"]["current_condition"][0]["lang_sr"][0]["value"]}
+            ]
         fixed_weather = []
-        for w in d['data']['weather']: # pylint: disable=invalid-name
+        for w in d["data"]["weather"]:  # pylint: disable=invalid-name
             fixed_hourly = []
-            for h in w['hourly']: # pylint: disable=invalid-name
-                weather_condition = h['weatherDesc'][0]['value']
+            for h in w["hourly"]:  # pylint: disable=invalid-name
+                weather_condition = h["weatherDesc"][0]["value"]
                 if lang in languages_to_translate:
-                    h['lang_%s' % lang] = \
-                        [{'value': translate(weather_condition, lang)}]
-                elif lang == 'sr':
-                    h['lang_%s' % lang] = \
-                        [{'value': cyr(h['lang_%s' % lang][0]['value'])}]
-                elif lang == 'el':
-                    h['lang_%s' % lang] = \
-                        [{'value': _patch_greek(h['lang_%s' % lang][0]['value'])}]
-                elif lang == 'sr-lat':
-                    h['lang_%s' % lang] = \
-                        [{'value': h['lang_sr'][0]['value']}]
+                    h["lang_%s" % lang] = [
+                        {"value": translate(weather_condition, lang)}
+                    ]
+                elif lang == "sr":
+                    h["lang_%s" % lang] = [
+                        {"value": cyr(h["lang_%s" % lang][0]["value"])}
+                    ]
+                elif lang == "el":
+                    h["lang_%s" % lang] = [
+                        {"value": _patch_greek(h["lang_%s" % lang][0]["value"])}
+                    ]
+                elif lang == "sr-lat":
+                    h["lang_%s" % lang] = [{"value": h["lang_sr"][0]["value"]}]
                 fixed_hourly.append(h)
-            w['hourly'] = fixed_hourly
+            w["hourly"] = fixed_hourly
             fixed_weather.append(w)
-        d['data']['weather'] = fixed_weather
+        d["data"]["weather"] = fixed_weather
         content = json.dumps(d)
     except (IndexError, ValueError) as exception:
         print(exception)
     return content
 def _fetch_content_and_headers(path, query_string, **kwargs):
     content, headers = _load_content_and_headers(path, query_string)
@@ -265,7 +299,7 @@ def _fetch_content_and_headers(path, query_string, **kwargs):
         _touch_empty_file(path, query_string)
         if response:
             headers = {}
-            headers['Content-Type'] = response.headers['content-type']
+            headers["Content-Type"] = response.headers["content-type"]
             _save_content_and_headers(path, query_string, response.content, headers)
             content = response.content
         else:
@@ -279,11 +313,13 @@ def _make_query(path, query_string):
     if _is_metno():
         path, query, days = metno_request(path, query_string)
-        if USER_AGENT == '':
-            raise ValueError('User agent must be set to adhere to metno ToS: https://api.met.no/doc/TermsOfService')
-        content, headers = _fetch_content_and_headers(path, query, headers={
-            'User-Agent': USER_AGENT
-        })
+        if USER_AGENT == "":
+            raise ValueError(
+                "User agent must be set to adhere to metno ToS: https://api.met.no/doc/TermsOfService"
+            )
+        content, headers = _fetch_content_and_headers(
+            path, query, headers={"User-Agent": USER_AGENT}
+        )
         content = create_standard_json_from_metno(content, days)
     else:
         # WWO tweaks
@@ -293,16 +329,17 @@ def _make_query(path, query_string):
     return content, headers
 @APP.route("/<path:path>")
 def proxy(path):
     """
     Main proxy function. Handles incoming HTTP queries.
     """
-    lang = request.args.get('lang', 'en')
+    lang = request.args.get("lang", "en")
     query_string = request.query_string.decode("utf-8")
-    query_string = query_string.replace('sr-lat', 'sr')
-    query_string = query_string.replace('lang=None', 'lang=en')
+    query_string = query_string.replace("sr-lat", "sr")
+    query_string = query_string.replace("lang=None", "lang=en")
     content = ""
     headers = ""
@@ -314,19 +351,20 @@ def proxy(path):
     return content, 200, headers
 if __name__ == "__main__":
-    #app.run(host='0.0.0.0', port=5001, debug=False)
-    #app.debug = True
+    # app.run(host='0.0.0.0', port=5001, debug=False)
+    # app.debug = True
     if len(sys.argv) == 1:
         bind_addr = "0.0.0.0"
         SERVER = WSGIServer((bind_addr, PROXY_PORT), APP)
         SERVER.serve_forever()
     else:
-        print('running single request from command line arg')
+        print("running single request from command line arg")
         APP.testing = True
         with APP.test_client() as c:
             resp = c.get(sys.argv[1])
-            print('Status: ' + resp.status)
+            print("Status: " + resp.status)
             # print('Headers: ' + dumps(resp.headers))
-            print(resp.data.decode('utf-8'))
+            print(resp.data.decode("utf-8"))
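Note: the new style throughout this diff (double quotes, 88-column wrapping, trailing commas in multi-line collections, two spaces before inline comments) matches what black produces with its default settings. The commit message does not name a formatter, so treating the change as black output is an assumption; under that assumption, a small sketch like the one below can check whether bin/proxy.py still matches that formatting.

# Sketch: check that a file already matches black's default formatting.
# Assumption: this commit was produced with black's defaults (88-column lines,
# double quotes); the commit itself does not say which tool was used.
import sys

import black


def is_black_formatted(path):
    """Return True if black's default mode would leave `path` unchanged."""
    with open(path, "r", encoding="utf-8") as f_src:
        source = f_src.read()
    try:
        # format_file_contents raises NothingChanged when the file is already clean
        black.format_file_contents(source, fast=False, mode=black.FileMode())
    except black.NothingChanged:
        return True
    return False


if __name__ == "__main__":
    target = sys.argv[1] if len(sys.argv) > 1 else "bin/proxy.py"
    print(target, "is formatted" if is_black_formatted(target) else "needs reformatting")

Running "python -m black bin/proxy.py" (again, assuming black) would reproduce the same kind of changes shown above.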
