2018-10-23 18:04:25 +00:00
|
|
|
|
# vim: fileencoding=utf-8
|
2018-10-02 21:56:11 +00:00
|
|
|
|
|
2018-10-23 18:04:25 +00:00
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
The proxy server acts as a backend for the wttr.in service.
|
|
|
|
|
|
|
|
|
|
It caches the answers and handles various data sources transforming their
|
|
|
|
|
answers into format supported by the wttr.in service.
|
|
|
|
|
|
|
|
|
|
"""
|
2018-12-02 08:41:29 +00:00
|
|
|
|
from __future__ import print_function
|
2018-10-23 18:04:25 +00:00
|
|
|
|
|
|
|
|
|
from gevent.pywsgi import WSGIServer
|
2018-10-02 21:56:11 +00:00
|
|
|
|
from gevent.monkey import patch_all
|
|
|
|
|
patch_all()
|
|
|
|
|
|
2018-10-23 18:04:25 +00:00
|
|
|
|
# pylint: disable=wrong-import-position,wrong-import-order
|
|
|
|
|
import sys
|
2018-10-02 21:56:11 +00:00
|
|
|
|
import os
|
|
|
|
|
import time
|
|
|
|
|
import json
|
|
|
|
|
|
|
|
|
|
import requests
|
|
|
|
|
import cyrtranslit
|
|
|
|
|
|
2018-10-23 18:04:25 +00:00
|
|
|
|
from flask import Flask, request
|
|
|
|
|
# Flask application object; HTTP routes are registered on it below.
APP = Flask(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Project root directory: the parent of the directory containing this file.
# Bug fix: the original passed the string literal '__file__' to
# os.path.dirname, which evaluates to '' and made MYDIR the current
# working directory instead of the project root.
MYDIR = os.path.abspath(
    os.path.dirname(os.path.dirname(__file__)))
# Make the project's bundled libraries importable.
sys.path.append("%s/lib/" % MYDIR)
|
|
|
|
|
|
|
|
|
|
from globals import PROXY_CACHEDIR, PROXY_HOST, PROXY_PORT
|
|
|
|
|
from translations import PROXY_LANGS
|
|
|
|
|
# pylint: enable=wrong-import-position
|
|
|
|
|
|
2018-10-02 21:56:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def load_translations(langs=None):
    """Load translation tables from share/translations/<lang>.txt files.

    Each useful line of a translation file has the form
    ``<id>:<translated>:<original>`` or
    ``<id>:<translated>:<original>:<comment>``;
    lines without a colon are skipped.

    Args:
        langs: iterable of language codes to load; defaults to PROXY_LANGS
            (backward compatible with the original zero-argument call).

    Returns:
        dict mapping language code -> {original_text: translated_text}.
    """
    if langs is None:
        langs = PROXY_LANGS

    translations = {}
    for lang_code in langs:
        f_name = 'share/translations/%s.txt' % lang_code
        translation = {}
        # Language code is re-derived from the file name (basename sans ext).
        lang = f_name.split('/')[-1].split('.', 1)[0]
        with open(f_name, "r") as f_file:
            for line in f_file:
                if ':' not in line:
                    continue
                # Four-field lines carry a trailing comment field; drop it.
                if line.count(':') == 3:
                    _, trans, orig, _ = line.strip().split(':', 4)
                else:
                    _, trans, orig = line.strip().split(':', 3)
                trans = trans.strip()
                orig = orig.strip()

                translation[orig] = trans
        translations[lang] = translation
    return translations
|
|
|
|
|
# Translation tables loaded once at import:
# {language code: {english phrase: translated phrase}}.
TRANSLATIONS = load_translations()
|
|
|
|
|
|
|
|
|
|
|
2018-10-23 18:04:25 +00:00
|
|
|
|
def _find_srv_for_query(path, query): # pylint: disable=unused-argument
|
2018-10-02 21:56:11 +00:00
|
|
|
|
return 'http://api.worldweatheronline.com/'
|
|
|
|
|
|
2018-10-23 18:04:25 +00:00
|
|
|
|
def _load_content_and_headers(path, query):
    """Return cached (content, headers) for a query, or (None, None).

    Cache entries live under PROXY_CACHEDIR bucketed by the current hour
    (YYYYMMDDHH), so an entry expires implicitly when the hour changes.
    Headers are stored alongside the content in a ``.headers`` JSON file.
    """
    timestamp = time.strftime("%Y%m%d%H", time.localtime())
    cache_file = os.path.join(PROXY_CACHEDIR, timestamp, path, query)
    try:
        # Bug fix: the original left both file handles unclosed; use
        # context managers so they are released even on error.
        with open(cache_file, 'r') as content_file:
            content = content_file.read()
        with open(cache_file + ".headers", 'r') as headers_file:
            headers = json.loads(headers_file.read())
        return content, headers
    except IOError:
        # Cache miss (either file absent) — caller fetches from upstream.
        return None, None
|
|
|
|
|
|
2018-10-23 18:04:25 +00:00
|
|
|
|
def _save_content_and_headers(path, query, content, headers):
    """Store response `content` and `headers` in the hourly cache bucket.

    Creates the bucket directory on demand; headers are serialized as JSON
    next to the content file (see _load_content_and_headers).
    """
    timestamp = time.strftime("%Y%m%d%H", time.localtime())
    cache_file = os.path.join(PROXY_CACHEDIR, timestamp, path, query)
    cache_dir = os.path.dirname(cache_file)
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    # Bug fix: the original left both file handles unclosed; use context
    # managers so data is flushed and descriptors released deterministically.
    with open(cache_file + ".headers", 'w') as headers_file:
        headers_file.write(json.dumps(headers))
    with open(cache_file, 'w') as content_file:
        content_file.write(content)
|
2018-10-02 21:56:11 +00:00
|
|
|
|
|
|
|
|
|
def translate(text, lang):
    """
    Translate `text` into `lang`
    """
    # Fall back to the original text when no translation is known.
    lang_table = TRANSLATIONS.get(lang, {})
    translated = lang_table.get(text, text)
    # Log phrases that came back untranslated so missing entries can be
    # added to the translation files.
    if text.encode('utf-8') == translated:
        print("%s: %s" % (lang, text))
    return translated
|
|
|
|
|
|
|
|
|
|
def cyr(to_translate):
    """
    Transliterate `to_translate` from latin into cyrillic
    """
    result = cyrtranslit.to_cyrillic(to_translate)
    return result
|
|
|
|
|
|
2018-10-23 18:04:25 +00:00
|
|
|
|
def _patch_greek(original):
|
2018-10-02 21:56:11 +00:00
|
|
|
|
return original.decode('utf-8').replace(u"Ηλιόλουστη/ο", u"Ηλιόλουστη").encode('utf-8')
|
|
|
|
|
|
|
|
|
|
def add_translations(content, lang):
    """
    Add `lang` translation to `content` (JSON)
    returned by the data source

    The weather description of the current condition and of every hourly
    forecast entry gets a ``lang_<lang>`` field.  Serbian cyrillic ('sr'),
    Serbian latin ('sr-lat') and Greek ('el') are derived from translations
    already present in the upstream answer; other languages come from the
    local translation tables.  Translation is best effort: on any failure
    the original `content` is returned unchanged.
    """
    languages_to_translate = TRANSLATIONS.keys()
    try:
        d = json.loads(content)  # pylint: disable=invalid-name
    except ValueError as exception:
        print("---")
        print(exception)
        print("---")
        # Bug fix: without this early return, `d` is undefined below and the
        # resulting NameError (not caught by the except clause) would escape.
        return content

    try:
        weather_condition = d['data']['current_condition'][0]['weatherDesc'][0]['value']
        if lang in languages_to_translate:
            d['data']['current_condition'][0]['lang_%s' % lang] = \
                [{'value': translate(weather_condition, lang)}]
        elif lang == 'sr':
            d['data']['current_condition'][0]['lang_%s' % lang] = \
                [{'value': cyr(
                    d['data']['current_condition'][0]['lang_%s' % lang][0]['value']\
                    .encode('utf-8'))}]
        elif lang == 'el':
            d['data']['current_condition'][0]['lang_%s' % lang] = \
                [{'value': _patch_greek(
                    d['data']['current_condition'][0]['lang_%s' % lang][0]['value']\
                    .encode('utf-8'))}]
        elif lang == 'sr-lat':
            d['data']['current_condition'][0]['lang_%s' % lang] = \
                [{'value': d['data']['current_condition'][0]['lang_sr'][0]['value']\
                    .encode('utf-8')}]

        # Rebuild the forecast, translating every hourly entry the same way.
        fixed_weather = []
        for w in d['data']['weather']:  # pylint: disable=invalid-name
            fixed_hourly = []
            for h in w['hourly']:  # pylint: disable=invalid-name
                weather_condition = h['weatherDesc'][0]['value']
                if lang in languages_to_translate:
                    h['lang_%s' % lang] = \
                        [{'value': translate(weather_condition, lang)}]
                elif lang == 'sr':
                    h['lang_%s' % lang] = \
                        [{'value': cyr(h['lang_%s' % lang][0]['value'].encode('utf-8'))}]
                elif lang == 'el':
                    h['lang_%s' % lang] = \
                        [{'value': _patch_greek(h['lang_%s' % lang][0]['value'].encode('utf-8'))}]
                elif lang == 'sr-lat':
                    h['lang_%s' % lang] = \
                        [{'value': h['lang_sr'][0]['value'].encode('utf-8')}]
                fixed_hourly.append(h)
            w['hourly'] = fixed_hourly
            fixed_weather.append(w)
        d['data']['weather'] = fixed_weather

        content = json.dumps(d)
    # Robustness: KeyError added — a response lacking 'data' or 'lang_sr'
    # previously crashed the request instead of falling back.
    except (IndexError, KeyError, ValueError) as exception:
        print(exception)
    return content
|
|
|
|
|
|
2018-10-23 18:04:25 +00:00
|
|
|
|
@APP.route("/<path:path>")
def proxy(path):
    """
    Main proxy function. Handles incoming HTTP queries.
    """

    lang = request.args.get('lang', 'en')
    # The upstream source only knows 'sr'; serve 'sr-lat' from it as well.
    query_string = request.query_string.replace('sr-lat', 'sr')

    content, headers = _load_content_and_headers(path, query_string)

    if content is None:
        # Cache miss: fetch from the upstream data source.
        srv = _find_srv_for_query(path, query_string)
        url = '%s/%s?%s' % (srv, path, query_string)
        print(url)

        # Retry (up to five times) until the upstream answers valid JSON.
        attempts = 5
        while attempts:
            response = requests.get(url, timeout=10)
            try:
                json.loads(response.content)
            except ValueError:
                attempts -= 1
            else:
                break

        headers = {}
        headers['Content-Type'] = response.headers['content-type']
        content = add_translations(response.content, lang)
        _save_content_and_headers(path, query_string, content, headers)

    return content, 200, headers
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    #app.run(host='0.0.0.0', port=5001, debug=False)
    #app.debug = True
    # Serve APP with gevent's WSGI server; serve_forever() blocks until killed.
    SERVER = WSGIServer((PROXY_HOST, PROXY_PORT), APP)
    SERVER.serve_forever()
|