From 0ac8d21284b6409a1cef934ca949d8ab561e5d28 Mon Sep 17 00:00:00 2001 From: simon Date: Wed, 7 Jul 2021 19:08:46 +0700 Subject: [PATCH] major app rebuild into a single flask application --- .gitignore | 4 +- backend/flask/.dockerignore | 2 - backend/flask/Dockerfile | 16 - backend/flask/app/__init__.py | 5 - backend/flask/app/aqi_parser.py | 88 ---- backend/flask/app/db_connect.py | 71 --- backend/flask/app/graph.py | 223 -------- backend/flask/app/graph_monthly.py | 215 -------- backend/flask/app/graph_pm.py | 187 ------- backend/flask/app/table_export.py | 140 ----- backend/flask/app/views.py | 124 ----- backend/flask/app/weather.py | 48 -- backend/flask/config.sample | 14 - backend/flask/run.py | 26 - deploy.sh | 9 +- docker-compose.yml | 42 +- frontend/nginx.conf | 23 - frontend/nginx/about/index.php | 92 ---- frontend/nginx/css/style.css | 476 ----------------- frontend/nginx/graphs/index.php | 147 ------ frontend/nginx/incl/footer.html | 5 - frontend/nginx/incl/topnav.php | 18 - frontend/nginx/index.php | 129 ----- frontend/nginx/monthly/index.php | 56 -- {backend/nginx => nginx}/Dockerfile | 2 +- {backend/nginx => nginx}/nginx.conf | 0 web/Dockerfile | 19 + web/config.json.sample | 17 + {backend/flask => web}/requirements.txt | 5 +- web/src/__init__.py | 0 web/src/db.py | 245 +++++++++ web/src/graph_current.py | 114 +++++ web/src/graph_monthly.py | 219 ++++++++ web/src/graph_nightly.py | 483 ++++++++++++++++++ web/src/helper.py | 78 +++ web/start.sh | 3 + .../nginx => web/static}/css/lightbox.css | 0 web/static/css/style.css | 365 +++++++++++++ {frontend/nginx => web/static}/favicon.ico | Bin {frontend/nginx => web/static}/img/cloud.png | Bin .../nginx => web/static}/img/cloud_colors.gif | Bin .../nginx => web/static}/img/icon/000.png | Bin .../nginx => web/static}/img/icon/01d.png | Bin .../nginx => web/static}/img/icon/01n.png | Bin .../nginx => web/static}/img/icon/02d.png | Bin .../nginx => web/static}/img/icon/02n.png | Bin .../nginx => 
web/static}/img/icon/03d.png | Bin .../nginx => web/static}/img/icon/03n.png | Bin .../nginx => web/static}/img/icon/04d.png | Bin .../nginx => web/static}/img/icon/04n.png | Bin .../nginx => web/static}/img/icon/09d.png | Bin .../nginx => web/static}/img/icon/09n.png | Bin .../nginx => web/static}/img/icon/10d.png | Bin .../nginx => web/static}/img/icon/10n.png | Bin .../nginx => web/static}/img/icon/11d.png | Bin .../nginx => web/static}/img/icon/11n.png | Bin .../nginx => web/static}/img/icon/13d.png | Bin .../nginx => web/static}/img/icon/13n.png | Bin .../nginx => web/static}/img/icon/50d.png | Bin .../nginx => web/static}/img/icon/50n.png | Bin .../static}/img/icon/category-good.png | Bin .../static}/img/icon/category-hazardous.png | Bin .../static}/img/icon/category-moderate.png | Bin .../static}/img/icon/category-plain.png | Bin .../static}/img/icon/category-unhealthy.png | Bin .../category-unhealthyforsensitivegroups.png | Bin .../img/icon/category-veryunhealthy.png | Bin .../static}/img/icon/humidity.png | Bin .../static}/img/icon/pressure.png | Bin .../nginx => web/static}/img/icon/wind.png | Bin .../static}/img/lightbox/close.png | Bin .../static}/img/lightbox/loading.gif | Bin .../static}/img/lightbox/next.png | Bin .../static}/img/lightbox/prev.png | Bin .../static}/img/social_preview.jpg | Bin {frontend/nginx => web/static}/js/aqi.js | 18 +- {frontend/nginx => web/static}/js/lightbox.js | 0 web/templates/about.html | 55 ++ web/templates/graphs.html | 103 ++++ web/templates/home.html | 103 ++++ web/templates/layout.html | 53 ++ web/templates/monthly.html | 22 + backend/flask/app.ini => web/uwsgi.ini | 4 +- web/views.py | 114 +++++ 84 files changed, 2021 insertions(+), 2161 deletions(-) delete mode 100644 backend/flask/.dockerignore delete mode 100644 backend/flask/Dockerfile delete mode 100644 backend/flask/app/__init__.py delete mode 100644 backend/flask/app/aqi_parser.py delete mode 100644 backend/flask/app/db_connect.py delete mode 100644 
backend/flask/app/graph.py delete mode 100644 backend/flask/app/graph_monthly.py delete mode 100644 backend/flask/app/graph_pm.py delete mode 100644 backend/flask/app/table_export.py delete mode 100644 backend/flask/app/views.py delete mode 100755 backend/flask/app/weather.py delete mode 100644 backend/flask/config.sample delete mode 100644 backend/flask/run.py delete mode 100644 frontend/nginx.conf delete mode 100644 frontend/nginx/about/index.php delete mode 100644 frontend/nginx/css/style.css delete mode 100644 frontend/nginx/graphs/index.php delete mode 100644 frontend/nginx/incl/footer.html delete mode 100644 frontend/nginx/incl/topnav.php delete mode 100644 frontend/nginx/index.php delete mode 100644 frontend/nginx/monthly/index.php rename {backend/nginx => nginx}/Dockerfile (79%) rename {backend/nginx => nginx}/nginx.conf (100%) create mode 100644 web/Dockerfile create mode 100644 web/config.json.sample rename {backend/flask => web}/requirements.txt (63%) create mode 100644 web/src/__init__.py create mode 100644 web/src/db.py create mode 100644 web/src/graph_current.py create mode 100644 web/src/graph_monthly.py create mode 100644 web/src/graph_nightly.py create mode 100644 web/src/helper.py create mode 100644 web/start.sh rename {frontend/nginx => web/static}/css/lightbox.css (100%) create mode 100644 web/static/css/style.css rename {frontend/nginx => web/static}/favicon.ico (100%) rename {frontend/nginx => web/static}/img/cloud.png (100%) rename {frontend/nginx => web/static}/img/cloud_colors.gif (100%) rename {frontend/nginx => web/static}/img/icon/000.png (100%) rename {frontend/nginx => web/static}/img/icon/01d.png (100%) rename {frontend/nginx => web/static}/img/icon/01n.png (100%) rename {frontend/nginx => web/static}/img/icon/02d.png (100%) rename {frontend/nginx => web/static}/img/icon/02n.png (100%) rename {frontend/nginx => web/static}/img/icon/03d.png (100%) rename {frontend/nginx => web/static}/img/icon/03n.png (100%) rename {frontend/nginx => 
web/static}/img/icon/04d.png (100%) rename {frontend/nginx => web/static}/img/icon/04n.png (100%) rename {frontend/nginx => web/static}/img/icon/09d.png (100%) rename {frontend/nginx => web/static}/img/icon/09n.png (100%) rename {frontend/nginx => web/static}/img/icon/10d.png (100%) rename {frontend/nginx => web/static}/img/icon/10n.png (100%) rename {frontend/nginx => web/static}/img/icon/11d.png (100%) rename {frontend/nginx => web/static}/img/icon/11n.png (100%) rename {frontend/nginx => web/static}/img/icon/13d.png (100%) rename {frontend/nginx => web/static}/img/icon/13n.png (100%) rename {frontend/nginx => web/static}/img/icon/50d.png (100%) rename {frontend/nginx => web/static}/img/icon/50n.png (100%) rename {frontend/nginx => web/static}/img/icon/category-good.png (100%) rename {frontend/nginx => web/static}/img/icon/category-hazardous.png (100%) rename {frontend/nginx => web/static}/img/icon/category-moderate.png (100%) rename {frontend/nginx => web/static}/img/icon/category-plain.png (100%) rename {frontend/nginx => web/static}/img/icon/category-unhealthy.png (100%) rename {frontend/nginx => web/static}/img/icon/category-unhealthyforsensitivegroups.png (100%) rename {frontend/nginx => web/static}/img/icon/category-veryunhealthy.png (100%) rename {frontend/nginx => web/static}/img/icon/humidity.png (100%) rename {frontend/nginx => web/static}/img/icon/pressure.png (100%) rename {frontend/nginx => web/static}/img/icon/wind.png (100%) rename {frontend/nginx => web/static}/img/lightbox/close.png (100%) rename {frontend/nginx => web/static}/img/lightbox/loading.gif (100%) rename {frontend/nginx => web/static}/img/lightbox/next.png (100%) rename {frontend/nginx => web/static}/img/lightbox/prev.png (100%) rename {frontend/nginx => web/static}/img/social_preview.jpg (100%) rename {frontend/nginx => web/static}/js/aqi.js (94%) rename {frontend/nginx => web/static}/js/lightbox.js (100%) create mode 100644 web/templates/about.html create mode 100644 
web/templates/graphs.html create mode 100644 web/templates/home.html create mode 100644 web/templates/layout.html create mode 100644 web/templates/monthly.html rename backend/flask/app.ini => web/uwsgi.ini (74%) create mode 100644 web/views.py diff --git a/.gitignore b/.gitignore index b57792f..1cb234b 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,7 @@ __pycache__ # protect real config files -config +config.json config.h # env files @@ -12,6 +12,4 @@ postgres.env *.ttf # dynamic files -**/dyn/*.json **/dyn/ -update.html diff --git a/backend/flask/.dockerignore b/backend/flask/.dockerignore deleted file mode 100644 index 3cbd4f5..0000000 --- a/backend/flask/.dockerignore +++ /dev/null @@ -1,2 +0,0 @@ -__pycache__ -/dyn/* \ No newline at end of file diff --git a/backend/flask/Dockerfile b/backend/flask/Dockerfile deleted file mode 100644 index 3dc546f..0000000 --- a/backend/flask/Dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -# Use the Python3.7.2 image -FROM python - -# Set the working directory to /app -WORKDIR /app - -VOLUME /app/dyn - -# Copy the current directory contents into the container at /app -ADD . 
/app - -# Install the dependencies -RUN pip install -r requirements.txt - -# run the command to start uWSGI -CMD ["uwsgi", "app.ini"] diff --git a/backend/flask/app/__init__.py b/backend/flask/app/__init__.py deleted file mode 100644 index 7b1c6f4..0000000 --- a/backend/flask/app/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from flask import Flask - -app = Flask(__name__) - -from app import views diff --git a/backend/flask/app/aqi_parser.py b/backend/flask/app/aqi_parser.py deleted file mode 100644 index 327861c..0000000 --- a/backend/flask/app/aqi_parser.py +++ /dev/null @@ -1,88 +0,0 @@ -import json -from datetime import datetime -import numpy as np - - -def input_process(data): - """ - parsing aqi post data and combine it with weather data - return: dict of combined values - """ - # error check - error_found = False - # get weather data - try: - with open('dyn/weather.json', 'r') as f: - weather_data = f.read() - weather_data_json = json.loads(weather_data) - del weather_data_json['timestamp'] - del weather_data_json['epoch_time'] - except FileNotFoundError: - # will get recreated on next run - weather_data_json = {} - # parse aqi data - json_dict = data - pm25 = json_dict['pm25'] - aqi, aqi_category = get_AQI(pm25) - json_dict['aqi_value'] = float(aqi) - json_dict['aqi_category'] = aqi_category - if pm25 == 0: - # something went wrong - error_found = True - # set timestamp - now = datetime.now() - timestamp = now.strftime("%Y-%m-%d %H:%M:%S") - epoch_time = int(now.strftime('%s')) - json_dict['timestamp'] = timestamp - json_dict['epoch_time'] = epoch_time - # combine the two and return - json_dict.update(weather_data_json) - return json_dict, error_found - - -def get_AQI(pm25): - """ takes the pm2.5 value and returns AQI and AQI category """ - if pm25 <= 12: - aqi = (pm25 / 12) * 50 - aqi_category = "Good" - elif pm25 > 12 and pm25 <= 35.4: - perc = (pm25 - 12) / (35.4 - 12) - aqi = (100 - 50) * perc + 50 - aqi_category = "Moderate" - elif pm25 > 35.4 and pm25 <= 
55.4: - perc = (pm25 - 35.4) / (55.4 - 35.4) - aqi = (150 - 100) * perc + 100 - aqi_category = "Unhealthy for Sensitive Groups" - elif pm25 > 55.4 and pm25 <= 150.4: - perc = (pm25 - 55.4) / (150.4 - 55.4) - aqi = (200 - 150) * perc + 150 - aqi_category = "Unhealthy" - elif pm25 > 150.4 and pm25 <= 199.9: - perc = (pm25 - 150.4) / (199.9 - 150.4) - aqi = (250 - 200) * perc + 200 - aqi_category = "Very Unhealthy" - elif pm25 > 199.9 and pm25 <= 250.4: - perc = (pm25 - 199.9) / (250.4 - 199.9) - aqi = (300 - 250) * perc + 250 - aqi_category = "Very Unhealthy" - elif pm25 > 250.4 and pm25 <= 299.9: - perc = (pm25 - 250.4) / (299.9 - 250.4) - aqi = (350 - 300) * perc + 300 - aqi_category = "Hazardous" - elif pm25 > 299.9 and pm25 <= 350.4: - perc = (pm25 - 299.9) / (350.4 - 299.9) - aqi = (400 - 350) * perc + 350 - aqi_category = "Hazardous" - elif pm25 > 350.4 and pm25 <= 424.6: - perc = (pm25 - 350.4) / (424.6 - 350.4) - aqi = (450 - 400) * perc + 400 - aqi_category = "Hazardous" - elif pm25 > 424.6 and pm25 <= 500.4: - perc = (pm25 - 424.6) / (500.4 - 424.6) - aqi = (500 - 450) * perc + 450 - aqi_category = "Hazardous" - elif pm25 > 500.4: - aqi = pm25 - aqi_category = "Hazardous" - aqi = np.round_(int(aqi), decimals=0, out=None) - return aqi, aqi_category diff --git a/backend/flask/app/db_connect.py b/backend/flask/app/db_connect.py deleted file mode 100644 index 7a4984a..0000000 --- a/backend/flask/app/db_connect.py +++ /dev/null @@ -1,71 +0,0 @@ -""" handles insert into postgres db """ - -import psycopg2 - - -def db_connect(config): - """ returns connection and curser """ - # set config - db_host = config['db_host'] - db_database = config['db_database'] - db_user = config['db_user'] - db_password = config['db_password'] - # Connect to database - conn = psycopg2.connect( - host = db_host, - database = db_database, - user = db_user, - password = db_password - ) - # Open a cursor to perform database operations - cur = conn.cursor() - return conn, cur - - -def 
db_close(conn, cur): - """ clean close the conn and curser """ - conn.commit() - cur.close() - conn.close() - - -def db_insert(config, json_dict): - """ make the db insert """ - # read out data dict - uptime = json_dict['uptime'] - temperature = json_dict['temperature'] - pressure = json_dict['pressure'] - humidity = json_dict['humidity'] - pm25 = json_dict['pm25'] - pm10 = json_dict['pm10'] - aqi_value = json_dict['aqi_value'] - aqi_category = json_dict['aqi_category'] - time_stamp = json_dict['timestamp'] - epoch_time = json_dict['epoch_time'] - weather_name = json_dict['weather_name'] - weather_icon = json_dict['weather_icon'] - wind_speed = json_dict['wind_speed'] - wind_direction = json_dict['wind_direction'] - sensor_id = json_dict['sensor_id'] - - # connect - conn, cur = db_connect(config) - # insert aqi - cur.execute("INSERT INTO aqi \ - (epoch_time, sensor_id, time_stamp, uptime, pm25, pm10, aqi_value, aqi_category) \ - VALUES (%s, %s, %s, %s, %s, %s, %s, %s)", - (epoch_time, sensor_id, time_stamp, uptime, pm25, pm10, aqi_value, aqi_category) - ) - # insert weather - cur.execute("INSERT INTO weather \ - (epoch_time, sensor_id, time_stamp, temperature, pressure, humidity, \ - wind_speed, wind_direction, weather_name, weather_icon) \ - VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)", - (epoch_time, sensor_id, time_stamp, temperature, pressure, humidity, - wind_speed, wind_direction, weather_name, weather_icon) - ) - - # close - db_close(conn, cur) - - return time_stamp diff --git a/backend/flask/app/graph.py b/backend/flask/app/graph.py deleted file mode 100644 index 9e09c02..0000000 --- a/backend/flask/app/graph.py +++ /dev/null @@ -1,223 +0,0 @@ -""" makes the nice plots """ - -from datetime import datetime, timedelta - -from matplotlib import pyplot as plt -import numpy as np -import pandas as pd -import psycopg2 - -from app.db_connect import db_connect, db_close - - -def create_current(config): - """ recreate current graph """ - # last three hours - now 
= datetime.now() - now_human = now.strftime('%c') - now_epoch = int(now.strftime('%s')) - last_3h = now_epoch - 3 * 60 * 60 - last_3h_limit = int(60 * 3) - # connect - conn, cur = db_connect(config) - # get data - cur.execute( - f'SELECT epoch_time, aqi_value FROM aqi \ - WHERE epoch_time > {last_3h} ORDER BY epoch_time DESC \ - LIMIT {last_3h_limit};') - rows = cur.fetchall() - # close db - db_close(conn, cur) - # set title - time_from = datetime.fromtimestamp(rows[-1][0]).strftime('%H:%M') - time_until = datetime.fromtimestamp(rows[0][0]).strftime('%H:%M') - plt_title = f'AQI values last 3h: {time_from} - {time_until}' - # parse rows - sample_rate = '3min' - x, y = build_plt(rows, sample_rate, '%H:%M') - # calc x_ticks - x_ticks = [] - for num, i in enumerate(x): - minute = int(i.split(':')[1]) - if minute % 15 == 0: - x_ticks.append(num) - # write plt - file_name = 'current' - write_plt(x, y, plt_title, x_ticks, file_name) - message = f'recreated current graph: {now_human}' - print(message) - - -def rebuild_3days(config): - """ wrapper to recreate all three days of graphs """ - now = datetime.now() - # get axis - x_1, y_1, plt_title_1, x_ticks_1 = get_axis(1, now, config) - x_2, y_2, plt_title_2, x_ticks_2 = get_axis(2, now, config) - x_3, y_3, plt_title_3, x_ticks_3 = get_axis(3, now, config) - # set max - y_max = max(y_1.append(y_2).append(y_3)) + 50 - # write plot - write_plt(x_1, y_1, plt_title_1, x_ticks_1, 'day-1', y_max) - write_plt(x_2, y_2, plt_title_2, x_ticks_2, 'day-2', y_max) - write_plt(x_3, y_3, plt_title_3, x_ticks_3, 'day-3', y_max) - print('recreaded last three days plt') - - -def get_axis(day, now, config): - """ recreate plot for single days """ - day_delta = now.date() - timedelta(days = day) - day_from = int(day_delta.strftime('%s')) - day_until = int(day_delta.strftime('%s')) + 60 * 60 * 24 - # make the SELECT - conn, cur = db_connect(config) - cur.execute( - f'SELECT epoch_time, aqi_value FROM aqi \ - WHERE epoch_time > {day_from} \ - AND 
epoch_time < {day_until} \ - ORDER BY epoch_time DESC LIMIT 720;' - ) - rows = cur.fetchall() - db_close(conn, cur) - # title - time_stamp = day_delta.strftime('%Y-%m-%d') - plt_title = f'AQI values from: {time_stamp}' - # build plt - x_ticks = np.arange(0, 97, step=8) - sample_rate = '15min' - x, y = build_plt(rows, sample_rate, '%H:%M') - return x, y, plt_title, x_ticks - - -def rebuild_7days(config): - """ recreate last-7 days from db """ - # setup - now = datetime.now() - day_until = int(now.date().strftime('%s')) - day_from = day_until - 7 * 24 * 60 * 60 - # get data - conn, cur = db_connect(config) - cur.execute( - f'SELECT epoch_time, aqi_value FROM aqi \ - WHERE epoch_time > {day_from} \ - AND epoch_time < {day_until} \ - ORDER BY epoch_time DESC LIMIT 30 * 24 * 7;' - ) - rows = cur.fetchall() - db_close(conn, cur) - # title - date_from = datetime.fromtimestamp(rows[-1][0]).strftime('%d %b') - date_until = datetime.fromtimestamp(rows[0][0]).strftime('%d %b') - plt_title = f'AQI values from: {date_from} until {date_until}' - # build axis of plot - x, y_1, y_2 = build_last7_plt(rows) - # make ticks - x_range = np.arange(0, 84, step=12) - x_date_time = pd.to_datetime(x).dt.date.unique() - x_dates = np.asarray([i.strftime('%d %b') for i in x_date_time]) - x_ticks = x_range, x_dates - # write the plot - write_last7_plt(x, y_1, y_2, x_ticks, plt_title) - print('recreaded last-7 days graph') - - -def build_plt(rows, sample_rate, time_format): - """ parse rows returns axis""" - # build x y - x_timeline = [datetime.fromtimestamp(i[0]) for i in rows] - y_aqi_values = [int(i[1]) for i in rows] - # build dataframe - data = {'timestamp': x_timeline, 'aqi': y_aqi_values} - df = pd.DataFrame(data) - # reindex as timeseries - indexed = df.set_index('timestamp') - indexed.sort_values(by=['timestamp'], inplace=True) - mean = indexed.resample(sample_rate).mean() - mean.interpolate(method='linear', limit=1, inplace=True, limit_area='inside') - mean.reset_index(level=0, 
inplace=True) - mean['timestamp'] = mean['timestamp'].dt.strftime(time_format) - mean['aqi'] = mean['aqi'].round() - # set axis - x = mean['timestamp'] - y = mean['aqi'] - return x, y - - -def build_last7_plt(rows): - """ build axis for last7 plot """ - sample_rate = '2h' - # build x y - x_timeline = [datetime.fromtimestamp(i[0]) for i in rows] - y_aqi_values = [int(i[1]) for i in rows] - # build dataframe - data = {'timestamp': x_timeline, 'aqi': y_aqi_values} - df = pd.DataFrame(data) - indexed = df.set_index('timestamp') - indexed.sort_values(by=['timestamp'], inplace=True) - mean = indexed.resample(sample_rate).mean() - mean['avg'] = mean['aqi'].resample('1d').mean() - mean['avg'] = mean.avg.shift(6) - - mean['avg'][0] = (mean['avg'].iloc[6] + mean['aqi'][0]) / 2 - mean['avg'][-1] = (mean['avg'].iloc[-6] + mean['aqi'][-1]) / 2 - - mean['avg'].interpolate(method='polynomial', order=3, inplace=True) - mean.reset_index(level=0, inplace=True) - mean['timestamp'] = mean['timestamp'].dt.strftime('%Y-%m-%d %H:%M') - mean['aqi'] = mean['aqi'].round() - mean['avg'] = mean['avg'].round() - x = mean['timestamp'] - y_1 = mean['aqi'] - y_2 = mean['avg'] - return x, y_1, y_2 - - -def write_plt(x, y, plt_title, x_ticks, file_name, y_max=''): - """ save plot to file """ - # calc ticks - if not y_max: - y_max = np.ceil(y.max()/50)*50 + 50 - # setup plot - plt.style.use('seaborn') - plt.plot(x, y, color='#313131',) - plt.fill_between(x, y, y2=0, where=(y > 0), color='#85a762', interpolate=True) # good - plt.fill_between(x, y, y2=50, where=(y > 50), color='#d4b93c', interpolate=True) # moderate - plt.fill_between(x, y, y2=100, where=(y > 100), color='#e96843', interpolate=True) # ufsg - plt.fill_between(x, y, y2=150, where=(y > 150), color='#d03f3b', interpolate=True) # unhealthy - plt.fill_between(x, y, y2=200, where=(y > 200), color='#be4173', interpolate=True) # vunhealthy - plt.fill_between(x, y, y2=300, where=(y > 300), color='#714261', interpolate=True) # hazardous - 
plt.fill_between(x, y, y2=0, where=(y > 0), color='#ffffff', alpha=0.1, interpolate=True) # soft - # handle passing ticks and lables separatly - if len(x_ticks) == 2: - plt.xticks(x_ticks[0], x_ticks[1]) - else: - plt.xticks(x_ticks) - plt.yticks(np.arange(0, y_max, step=50)) - plt.title(plt_title, fontsize=20) - plt.tight_layout() - plt.savefig(f'dyn/{file_name}.png', dpi = 300) - plt.figure() - plt.close('all') - - -def write_last7_plt(x, y_1, y_2, x_ticks, plt_title): - """ plot last-7 only """ - y_max = np.ceil(max(y_1.append(y_2))/50)*50 + 50 - # plot - plt.style.use('seaborn') - plt.plot(x, y_1, color='#313131', label='2hour avg') - plt.plot(x, y_2, color='#cc0000', label='daily avg') - plt.fill_between(x, y_1, y2=0, where=(y_1 > 0), color='#85a762', interpolate=True) # good - plt.fill_between(x, y_1, y2=50, where=(y_1 > 50), color='#d4b93c', interpolate=True) # moderate - plt.fill_between(x, y_1, y2=100, where=(y_1 > 100), color='#e96843', interpolate=True) # ufsg - plt.fill_between(x, y_1, y2=150, where=(y_1 > 150), color='#d03f3b', interpolate=True) # unhealthy - plt.fill_between(x, y_1, y2=200, where=(y_1 > 200), color='#be4173', interpolate=True) # vunhealthy - plt.fill_between(x, y_1, y2=300, where=(y_1 > 300), color='#714261', interpolate=True) # hazardous - plt.fill_between(x, y_1, y2=0, where=(y_1 > 0), color='#ffffff', alpha=0.1, interpolate=True) # soft - plt.xticks(x_ticks[0], x_ticks[1]) - plt.yticks(np.arange(0, y_max, step=50)) - plt.title(plt_title, fontsize=20) - plt.legend() - plt.tight_layout() - plt.savefig('dyn/last-7.png', dpi = 300) - plt.figure() diff --git a/backend/flask/app/graph_monthly.py b/backend/flask/app/graph_monthly.py deleted file mode 100644 index f0386b7..0000000 --- a/backend/flask/app/graph_monthly.py +++ /dev/null @@ -1,215 +0,0 @@ -""" handles monthly tasks """ - -import calendar -import json -import numpy as np -import pandas as pd - -from datetime import datetime, timedelta -from matplotlib import pyplot as plt 
-from os import path - -from app.db_connect import db_connect, db_close - - -def get_epoch(): - """ returns epoch for last month and last month last year """ - # run within first 7 days of month - now = datetime.now() - # last month - last_day = now.replace(day=1) - timedelta(days=1) - month_start = last_day.replace(day=1,hour=0,minute=0,second=0) - month_end = last_day.replace(hour=23,minute=59,second=59) - # last year - last_year = last_day.year - 1 - month_start_year = month_start.replace(year=last_year) - m_start_year_next = month_start_year + timedelta(days=31) - m_start_year_first = m_start_year_next.replace(day=1) - month_end_year = (m_start_year_first - timedelta(days=1)).replace(hour=23,minute=59,second=59) - # build tpl and return - last_month_tpl = (month_start.strftime('%s'), month_end.strftime('%s')) - last_year_tpl = (month_start_year.strftime('%s'), month_end_year.strftime('%s')) - return last_month_tpl, last_year_tpl - - -def get_rows(last_month_tpl, last_year_tpl, config): - """ get rows from postgres """ - conn, cur = db_connect(config) - cur.execute( - f'SELECT epoch_time, aqi_value FROM aqi \ - WHERE epoch_time > {last_month_tpl[0]} \ - AND epoch_time < {last_month_tpl[1]} \ - ORDER BY epoch_time DESC;' - ) - rows_month = cur.fetchall() - cur.execute( - f'SELECT epoch_time, aqi_value FROM aqi \ - WHERE epoch_time > {last_year_tpl[0]} \ - AND epoch_time < {last_year_tpl[1]} \ - ORDER BY epoch_time DESC;' - ) - rows_year = cur.fetchall() - db_close(conn, cur) - return rows_month, rows_year - - -def get_axis(rows_month, rows_year): - """ takes rows and returns axis """ - # initial df - x_timeline = [datetime.fromtimestamp(i[0]) for i in rows_month] - y_aqi_values = [int(i[1]) for i in rows_month] - data = {'timestamp': x_timeline, 'now_aqi': y_aqi_values} - df = pd.DataFrame(data) - indexed = df.set_index('timestamp') - indexed.sort_values(by=['timestamp'], inplace=True) - mean = indexed.resample('8h').mean().round() - # reset timestamp to day - 
mean.reset_index(level=0, inplace=True) - mean['timestamp'] = mean['timestamp'].dt.strftime('%d %H:%M') - mean.set_index('timestamp', inplace=True) - # second df with last year data - x_timeline = [datetime.fromtimestamp(i[0]) for i in rows_year] - y_aqi_values = [int(i[1]) for i in rows_year] - data = {'timestamp': x_timeline, 'year_aqi': y_aqi_values} - df = pd.DataFrame(data) - indexed = df.set_index('timestamp') - indexed.sort_values(by=['timestamp'], inplace=True) - year_mean = indexed.resample('8h').mean().round() - # reset timestamp to day - year_mean.reset_index(level=0, inplace=True) - year_mean['timestamp'] = year_mean['timestamp'].dt.strftime('%d %H:%M') - year_mean.set_index('timestamp', inplace=True) - # merge the two - mean['year_aqi'] = year_mean['year_aqi'] - mean.reset_index(level=0, inplace=True) - mean.sort_values(by='timestamp', ascending=True, inplace=True) - # return axis - x = mean['timestamp'] - y_1 = mean['now_aqi'] - y_2 = mean['year_aqi'] - return x, y_1, y_2, mean - - -def write_monthly_plot(x, y_1, y_2, timestamp): - """ plot last-7 only """ - # parse timestamp - date_from = datetime.fromtimestamp(timestamp) - date_title = date_from.strftime('%b %Y') - month_short = date_from.strftime('%b') - file_name = 'dyn/monthly/' + date_from.strftime('%Y-%m') + '.png' - plt_title = f'AQI values for: {date_title}' - # build ticks - y_max = np.ceil(max(y_1.append(y_2))/50)*50 + 50 - x_range = np.arange(0, len(x), step=9) - last_day = int(x.max().split()[0]) - x_numbers = np.arange(1, last_day + 1, step=3) - x_dates = [f'{str(i).zfill(2)} {month_short}' for i in x_numbers] - x_ticks = x_range, x_dates - # plot - plt.style.use('seaborn') - plt.plot(x, y_1, color='#313131', label='this year') - plt.plot(x, y_2, color='#666666', linestyle='dashed', label='last year') - plt.fill_between(x, y_1, y2=0, where=(y_1 > 0), color='#85a762', interpolate=True) # good - plt.fill_between(x, y_1, y2=50, where=(y_1 > 50), color='#d4b93c', interpolate=True) # moderate 
- plt.fill_between(x, y_1, y2=100, where=(y_1 > 100), color='#e96843', interpolate=True) # ufsg - plt.fill_between(x, y_1, y2=150, where=(y_1 > 150), color='#d03f3b', interpolate=True) # unhealthy - plt.fill_between(x, y_1, y2=200, where=(y_1 > 200), color='#be4173', interpolate=True) # vunhealthy - plt.fill_between(x, y_1, y2=300, where=(y_1 > 300), color='#714261', interpolate=True) # hazardous - plt.fill_between(x, y_1, y2=0, where=(y_1 > 0), color='#ffffff', alpha=0.1, interpolate=True) # soft - plt.xticks(x_ticks[0], x_ticks[1]) - plt.yticks(np.arange(0, y_max, step=50)) - plt.title(plt_title, fontsize=20) - plt.legend() - plt.tight_layout() - plt.savefig(file_name, dpi = 300) - plt.figure() - - -def get_change(curr, year): - """ helper function to get change on thresh """ - diff_avg = (curr - year) / curr - if diff_avg <= -0.15: - avg_change = 'down' - elif diff_avg >= 0.15: - avg_change = 'up' - else: - avg_change = 'same' - return avg_change - - -def get_aqi(val): - """ helper function to get aqi category """ - if val <= 50: - category = 'Good' - elif val > 50 and val <= 100: - category = 'Moderate' - elif val > 100 and val <= 150: - category = 'Unhealthy for Sensitive Groups' - elif val > 150 and val <= 200: - category = 'Unhealthy' - elif val > 200 and val <= 300: - category = 'Very Unhealthy' - else: - category = 'Hazardous' - return category - - -def write_monthly_json(mean, timestamp): - """ write json file with monthly details """ - date_from = datetime.fromtimestamp(timestamp) - file_name = 'dyn/monthly/' + date_from.strftime('%Y-%m') + '.json' - # current - curr_min = int(mean['now_aqi'].min()) - curr_max = int(mean['now_aqi'].max()) - curr_mean = int(mean['now_aqi'].mean()) - curr_cat = get_aqi(curr_mean) - # last - year_min = int(mean['year_aqi'].min()) - year_max = int(mean['year_aqi'].max()) - year_mean = int(mean['year_aqi'].mean()) - year_cat = get_aqi(year_mean) - # change - min_change = get_change(curr_min, year_min) - max_change = 
get_change(curr_max, year_max) - mean_change = get_change(curr_mean, year_mean) - # build rows - data_rows = [] - data_rows.append(['min: ', curr_min, year_min, min_change]) - data_rows.append(['max: ', curr_max, year_max, max_change]) - data_rows.append(['avg: ', curr_mean, year_mean, mean_change]) - data_rows.append(['avg aqi: ', curr_cat, year_cat, mean_change]) - # build dict - monthly_dict = {} - monthly_dict['data'] = data_rows - # write to disk - json_str = json.dumps(monthly_dict) - with open(file_name, 'w') as f: - f.write(json_str) - - -def monthly_found(timestamp): - """ check if monthly graph already created """ - date_from = datetime.fromtimestamp(timestamp) - file_name = 'dyn/monthly/' + date_from.strftime('%Y-%m') + '.png' - found = path.isfile(file_name) - return found - - -def create_monthly(config): - """ check if last month plot exists, create if needed """ - last_month_tpl, last_year_tpl = get_epoch() - timestamp = int(last_month_tpl[0]) - found = monthly_found(timestamp) - if found: - print('monthly already created, skipping...') - return - else: - print('creating monthly graph and json file') - # get rows - rows_month, rows_year = get_rows(last_month_tpl, last_year_tpl, config) - # get axis - x, y_1, y_2, mean = get_axis(rows_month, rows_year) - # write plot - write_monthly_plot(x, y_1, y_2, timestamp) - # write data json - write_monthly_json(mean, timestamp) diff --git a/backend/flask/app/graph_pm.py b/backend/flask/app/graph_pm.py deleted file mode 100644 index ce8b0c6..0000000 --- a/backend/flask/app/graph_pm.py +++ /dev/null @@ -1,187 +0,0 @@ -""" creates the PM 2.5 and pm 10 graphs """ - -from datetime import datetime - -from matplotlib import pyplot as plt -import numpy as np -import pandas as pd - -from app.db_connect import db_connect, db_close - - -def color_colums(y): - """ helper function to color bar columns """ - col = [] - for val in y: - if val <= 50: - # good - col.append('#85a762') - elif val > 50 and val <= 100: - # moderate 
- col.append('#d4b93c') - elif val > 100 and val <= 150: - # ufsg - col.append('#e96843') - elif val > 150 and val <= 200: - # unhealthy - col.append('#d03f3b') - elif val > 200 and val <= 300: - # vunhealthy - col.append('#be4173') - else: - # hazardous - col.append('#714261') - return col - - -def get_pm_data(config): - """ gets last 10 days worth of data""" - now = datetime.now() - day_until = int(now.date().strftime('%s')) - day_from = day_until - 10 * 24 * 60 * 60 - conn, cur = db_connect(config) - cur.execute( - f'SELECT epoch_time, pm25, pm10 FROM aqi \ - WHERE epoch_time > {day_from} \ - AND epoch_time < {day_until} \ - ORDER BY epoch_time DESC;' - ) - rows = cur.fetchall() - db_close(conn, cur) - return rows - - -def get_pm_axis(rows): - """ build axis """ - # build dataframe - x_timeline = [datetime.fromtimestamp(i[0]) for i in rows] - y_pm25_values = [int(i[1]) for i in rows] - y_pm10_values = [int(i[2]) for i in rows] - data = { - 'timestamp': x_timeline, - 'pm25': y_pm25_values, - 'pm10': y_pm10_values - } - df = pd.DataFrame(data) - indexed = df.set_index('timestamp') - indexed.sort_values(by=['timestamp'], inplace=True, ascending=True) - mean = indexed.resample('1d').mean() - mean.reset_index(level=0, inplace=True) - # axis - mean['pm25'] = mean['pm25'].round() - mean['pm10'] = mean['pm10'].round() - x = mean['timestamp'] - y_1 = mean['pm25'] - y_2 = mean['pm10'] - return x, y_1, y_2 - - -def build_pm_plot(x, y, y_max, thresh, title): - """ write plots to file """ - file_name = title.replace('.', '') - # make ticks - x_range = np.arange(10).tolist() - x_date_time = pd.to_datetime(x).dt.date.unique() - x_dates = [i.strftime('%d %b') for i in x_date_time] - # color - col = [] - for val in y: - if val < thresh: - col.append('#6ecd65') - else: - col.append('#ff4d4d') - # title - plt_title = f'Daily avg PM {title} exposure' - # plot - plt.style.use('seaborn') - plt.bar(x_dates, y, color=col, width=0.5) - plt.axhline(y=thresh, color='#6ecd65', 
linestyle=':') - plt.xticks(ticks=x_range, labels=x_dates) - plt.yticks(np.arange(0, y_max, step=25)) - plt.title(plt_title, fontsize=20) - plt.tight_layout() - plt.savefig(f'dyn/pm{file_name}.png', dpi=300) - plt.close('all') - plt.figure() - - -def rebuild_pm_bar(config): - """ main function to rebuild pm2.5 and pm10 values """ - # get data - rows = get_pm_data(config) - x, y_1, y_2 = get_pm_axis(rows) - # max - y_max = np.ceil(max(y_1.append(y_2))/25)*25 + 25 - # pm 2.5 - build_pm_plot(x, y_1, y_max, thresh=25, title='2.5') - # pm 10 - build_pm_plot(x, y_2, y_max, thresh=50, title='10') - # done - print('recreated PM 2.5 and PM 10 graphs') - - -# hour bar chart -def get_hour_data(config): - """ get last three days worth of data from postgres """ - # time - now = datetime.now() - day_until = int(now.date().strftime('%s')) - day_from = day_until - 3 * 24 * 60 * 60 - # call db - conn, cur = db_connect(config) - cur.execute( - f'SELECT epoch_time, aqi_value FROM aqi \ - WHERE epoch_time > {day_from} \ - AND epoch_time < {day_until} \ - ORDER BY epoch_time DESC;' - ) - rows = cur.fetchall() - db_close(conn, cur) - return rows - - -def get_hour_axis(rows): - """ build x and y from the rows """ - x_timeline = [datetime.fromtimestamp(i[0]) for i in rows] - y_aqi_values = [int(i[1]) for i in rows] - # build dataframe - data = {'timestamp': x_timeline, 'aqi': y_aqi_values} - df = pd.DataFrame(data) - indexed = df.set_index('timestamp') - indexed.sort_values(by=['timestamp'], inplace=True) - mean = indexed.resample('1h').mean() - # regroup by hour - mean_hour = mean.groupby([mean.index.hour]).mean() - mean_hour.reset_index(level=0, inplace=True) - # set axis - x = mean_hour['timestamp'] - y = mean_hour['aqi'].round() - return x, y - - -def build_hour_plot(x, y): - """ takes x and y and writes plot to file """ - plt_title = 'Last three days average AQI for each hour' - # ticks - x_range = np.arange(0, 24, step=3) - x_hours = [str(i).zfill(2) + ":00" for i in x_range] - 
y_max = np.ceil(max(y)/50) * 50 + 50 - # color columns - col = color_colums(y) - # create plot - plt.style.use('seaborn') - plt.bar(x, y, color=col, width=0.5) - plt.yticks(np.arange(0, y_max, step=50)) - plt.xticks(ticks=x_range, labels=x_hours) - plt.title(plt_title, fontsize=20) - plt.tight_layout() - plt.savefig('dyn/hours.png', dpi=300) - plt.close('all') - plt.figure() - - -def rebuild_hour_bar(config): - """ main function to rebuild houly bar avg """ - rows = get_hour_data(config) - x, y = get_hour_axis(rows) - build_hour_plot(x, y) diff --git a/backend/flask/app/table_export.py b/backend/flask/app/table_export.py deleted file mode 100644 index db7cb10..0000000 --- a/backend/flask/app/table_export.py +++ /dev/null @@ -1,140 +0,0 @@ -""" recreate json file to populate last year comparison table """ - -from datetime import datetime -import numpy as np -import pandas as pd -from matplotlib import pyplot as plt - -from app.db_connect import db_connect, db_close -from app.graph_pm import color_colums - - -def get_rows(config): - """ get rows from last 7 days - and last 7 days one year ago """ - now = datetime.now() - # last 10 - now_until = int(now.date().strftime('%s')) - now_from = now_until - 7 * 24 * 60 * 60 - # last 10 one year ago - year_until = now_until - 365 * 24 * 60 * 60 - year_from = now_until - 372 * 24 * 60 * 60 - # make the call - conn, cur = db_connect(config) - cur.execute( - f'SELECT epoch_time, aqi_value FROM aqi \ - WHERE epoch_time > {now_from} \ - AND epoch_time < {now_until} \ - ORDER BY epoch_time DESC;' - ) - now_rows = cur.fetchall() - cur.execute( - f'SELECT epoch_time, aqi_value FROM aqi \ - WHERE epoch_time > {year_from} \ - AND epoch_time < {year_until} \ - ORDER BY epoch_time DESC;' - ) - year_rows = cur.fetchall() - # close and return - db_close(conn, cur) - return now_rows, year_rows - - -def initial_df(now_rows, year_rows): - """ build mean df with year data split into columns """ - # first df with current data - x_timeline = 
[datetime.fromtimestamp(i[0]) for i in now_rows] - y_aqi_values = [int(i[1]) for i in now_rows] - data = {'timestamp': x_timeline, 'now_aqi': y_aqi_values} - df = pd.DataFrame(data) - indexed = df.set_index('timestamp') - indexed.sort_values(by=['timestamp'], inplace=True) - mean = indexed.resample('1d').mean().round() - # second df with last year data - x_timeline = [datetime.fromtimestamp(i[0]) for i in year_rows] - y_aqi_values = [int(i[1]) for i in year_rows] - data = {'timestamp': x_timeline, 'year_aqi': y_aqi_values} - df = pd.DataFrame(data) - indexed = df.set_index('timestamp') - indexed.sort_values(by=['timestamp'], inplace=True) - year_mean = indexed.resample('1d').mean().round() - year_mean.reset_index(level=0, inplace=True) - # merge the two - mean.reset_index(level=0, inplace=True) - mean['year_aqi'] = year_mean['year_aqi'] - mean.sort_values(by='timestamp', ascending=False, inplace=True) - mean['timestamp'] = mean['timestamp'].dt.strftime('%d %b') - # return result - return mean - - -def write_df(mean): - """ finalize df and compare values """ - # build temp column with diff - mean['diff'] = (mean['now_aqi'] - mean['year_aqi']) / mean['now_aqi'] - mean['change'] = np.where(mean['diff'].abs() < 0.15, 'same', mean['diff']) - mean['change'] = np.where(mean['diff'] <= -0.15, 'down', mean['change']) - mean['change'] = np.where(mean['diff'] >= 0.15, 'up', mean['change']) - del mean['diff'] - # build average row on top - now_avg = mean['now_aqi'].mean() - year_avg = mean['year_aqi'].mean() - diff_avg = (now_avg - year_avg) / now_avg - if diff_avg <= -0.15: - avg_change = 'down' - elif diff_avg >= 0.15: - avg_change = 'up' - else: - avg_change = 'same' - - # build avg df - avg_row = {'timestamp': 'avg 7 days', 'now_aqi': now_avg, 'year_aqi': year_avg, 'change': avg_change} - new_row = pd.DataFrame(avg_row, index = [0]).round() - mean = pd.concat([new_row, mean]).reset_index(drop = True) - # convert to int - mean['now_aqi'] = mean['now_aqi'].astype('int') - 
mean['year_aqi'] = mean['year_aqi'].astype('int') - # extract and write json from df - mean_json = mean.to_json(orient='split') - with open('dyn/year-table.json', 'w') as f: - f.write(mean_json) - - -def write_graph(mean): - """ recreate barchart with yearly comparison """ - # build axis - mean.sort_index(inplace=True) - x = mean['timestamp'].to_list() - y_1 = mean['now_aqi'].to_list() - y_2 = mean['year_aqi'].to_list() - # build color lists - col_y_1 = color_colums(y_1) - col_y_2 = color_colums(y_2) - # set ticks - y_max = int(np.ceil(max(y_1 + y_2)/50) * 50 + 50) - x_indexes = np.arange(len(x)) - # build plot - width = 0.25 - plt_title = 'Daily avg AQI values compared to last year' - plt_suptitle = 'left: this year, right: last year' - plt.style.use('seaborn') - # write bars - plt.bar(x_indexes - (width / 2) - 0.02, y_1, color=col_y_1, width=width) - plt.bar(x_indexes + (width / 2) + 0.02, y_2, color=col_y_2, width=width) - plt.title(plt_suptitle,fontsize=15) - plt.suptitle(plt_title,fontsize=20, y=0.96) - plt.yticks(np.arange(0, y_max, step=50)) - plt.xticks(ticks=x_indexes, labels=x) - plt.tight_layout() - plt.savefig('dyn/year-graph.png', dpi=300) - plt.figure() - - -def rebuild_table(config): - """ main function to recreate year comparison table """ - now_rows, year_rows = get_rows(config) - mean = initial_df(now_rows, year_rows) - write_df(mean) - write_graph(mean) - # done - print('recreated year comparison graph and json file') diff --git a/backend/flask/app/views.py b/backend/flask/app/views.py deleted file mode 100644 index c41a8c2..0000000 --- a/backend/flask/app/views.py +++ /dev/null @@ -1,124 +0,0 @@ -import configparser -import json - -from flask import request -from flask_httpauth import HTTPBasicAuth -from apscheduler.schedulers.background import BackgroundScheduler - - -from app import app -from app import aqi_parser -from app import weather -from app import graph -from app import graph_pm -from app import table_export -from app import 
graph_monthly -from app.db_connect import db_insert - - -def get_config(): - """ read out config file """ - # parse - config_parser = configparser.ConfigParser() - config_parser.read('config') - # build dict - config = {} - config["authUsername"] = config_parser.get('aqi_monitor', "authUsername") - config["authPassword"] = config_parser.get('aqi_monitor', "authPassword") - config["api_key"] = config_parser.get('openweathermap', "api_key") - config["lat"] = config_parser.get('openweathermap', "lat") - config["lon"] = config_parser.get('openweathermap', "lon") - # db - config["db_host"] = config_parser.get('postgres', "db_host") - config["db_database"] = config_parser.get('postgres', "db_database") - config["db_user"] = config_parser.get('postgres', "db_user") - config["db_password"] = config_parser.get('postgres', "db_password") - return config - - -# start up -auth = HTTPBasicAuth() -config = get_config() -weather.handle_weather(config) -graph.create_current(config) -graph_pm.rebuild_pm_bar(config) -graph.rebuild_3days(config) -graph.rebuild_7days(config) -graph_pm.rebuild_hour_bar(config) -table_export.rebuild_table(config) -graph_monthly.create_monthly(config) - -# build username / pw dict for basic auth -USER_DATA = {} -USER_DATA[config['authUsername']] = config['authPassword'] - - -# start scheduler -scheduler = BackgroundScheduler() -scheduler.add_job( - weather.handle_weather, args=[config], trigger="interval", name='weather_api', seconds=900 -) -scheduler.add_job( - graph.create_current, args=[config], trigger="cron", minute='*/5', name='current_graph' -) -scheduler.add_job( - graph.rebuild_3days, args=[config], trigger="cron", day='*', hour='1', minute='1', name='3_days' -) -scheduler.add_job( - graph.rebuild_7days, args=[config], trigger="cron", day='*', hour='1', minute='2', name='7_days' -) -scheduler.add_job( - graph_pm.rebuild_pm_bar, args=[config], trigger="cron", day='*', hour='1', minute='3', name='pm_bar' -) -scheduler.add_job( - 
graph_pm.rebuild_hour_bar, args=[config], trigger="cron", day='*', hour='1', minute='4', name='hour_bar' -) -scheduler.add_job( - table_export.rebuild_table, args=[config], trigger="cron", day='*', hour='1', minute='6', name='rebuild_table' -) -scheduler.add_job( - graph_monthly.create_monthly, args=[config], trigger="cron", day='*', hour='1', minute='7', name='create_monthly' -) -scheduler.start() - - -@auth.verify_password -def verify(username, password): - if not (username and password): - return False - return USER_DATA.get(username) == password - - -# ingest -@app.route('/ingest', methods=['POST']) -@auth.login_required -def ingest(): - data = request.json - if data: - # populate data dict - json_dict, error_found = aqi_parser.input_process(data) - if error_found: - print('pm25 read failed') - print(json_dict) - else: - # save to db - time_stamp = db_insert(config, json_dict) - print(f'db insert done at {time_stamp}') - # save to webserver - data = json.dumps(json_dict) - with open('dyn/air.json', 'w') as f: - f.write(data) - print(data) - return 'ingest' - - -# output -@app.route('/') -def home(): - try: - with open('dyn/air.json', 'r') as f: - data = f.read() - except FileNotFoundError: - # will get regeneratod on next run - data = '{}' - return data diff --git a/backend/flask/app/weather.py b/backend/flask/app/weather.py deleted file mode 100755 index d02cc05..0000000 --- a/backend/flask/app/weather.py +++ /dev/null @@ -1,48 +0,0 @@ -""" get data from openweathermap.org """ - -from datetime import datetime -from time import sleep -import json - -import requests - - -def get_weather(config): - """ - gets the missing weather data from openweathermap - return: json string - """ - api_key = config['api_key'] - lat = config['lat'] - lon = config['lon'] - # get data - r = requests.get("https://api.openweathermap.org/data/2.5/weather?&units=metric&appid=" + api_key + "&lat=" + lat + "&lon=" + lon, timeout=20) - # format data - r_json = r.json() - weather_name = 
r_json['weather'][0]['main'] - weather_icon = r_json['weather'][0]['icon'] - wind_speed = r_json['wind']['speed'] - wind_direction = r_json['wind']['deg'] - # timestamp - now = datetime.now() - timestamp = now.strftime("%Y-%m-%d %H:%M:%S") - epoch_time = int(now.strftime('%s')) - # form dict - json_dict = {} - json_dict['weather_name'] = weather_name - json_dict['weather_icon'] = weather_icon - json_dict['wind_speed'] = wind_speed - json_dict['wind_direction'] = wind_direction - json_dict['timestamp'] = timestamp - json_dict['epoch_time'] = epoch_time - # return json string - weather_json = json.dumps(json_dict) - return weather_json, timestamp - - -def handle_weather(config): - """ sets infinite loop to collect api data """ - weather_json, timestamp = get_weather(config) - with open('dyn/weather.json', 'w') as f: - f.write(weather_json) - print(f'weather data updated: {timestamp}') diff --git a/backend/flask/config.sample b/backend/flask/config.sample deleted file mode 100644 index 0eb3afa..0000000 --- a/backend/flask/config.sample +++ /dev/null @@ -1,14 +0,0 @@ -[aqi_monitor] -authUsername = username -authPassword = password - -[openweathermap] -api_key = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx -lat = 40.71 -lon = -73.99 - -[postgres] -db_host = postgres -db_database = aqi -db_user = aqi -db_password = aaaaaaaaaaaaaaaaa \ No newline at end of file diff --git a/backend/flask/run.py b/backend/flask/run.py deleted file mode 100644 index d014d8e..0000000 --- a/backend/flask/run.py +++ /dev/null @@ -1,26 +0,0 @@ -""" - -Documentation: https://flask.palletsprojects.com - -Install on Arch -- sudo pacman -S python-flask -- sudo pacman -S python-flask-httpauth - -or with pip -- pip install Flask -- pip install Flask-HTTPAuth - - -Run Debug env: -export FLASK_APP=run.py -export FLASK_DEBUG=1 - -""" - - -from app import app - - - -if __name__ == "__main__": - app.run() diff --git a/deploy.sh b/deploy.sh index 34d74f0..38ba50c 100755 --- a/deploy.sh +++ b/deploy.sh @@ -4,12 +4,9 @@ 
rsync --progress -a docker-compose.yml vps2:docker/ rsync --progress -a env vps2:docker/ rsync --progress -a --delete-after helper_scripts vps2:docker/ -rsync --progress -a --delete-after \ - --exclude dyn --exclude config.sample --exclude __pychache__ \ - backend vps2:docker/ -rsync --progress -a --delete-after \ - --exclude dyn \ - frontend vps2:docker/ +rsync --progress -a --delete-after nginx vps2:docker/ +rsync --progress -a --delete-after --exclude config.json.sample --exclude **/__pychache__ --exclude static/dyn \ + web vps2:docker/ ## exit 0 diff --git a/docker-compose.yml b/docker-compose.yml index bf242ff..2c9b50c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -34,26 +34,28 @@ services: - "com.github.jrcs.letsencrypt_nginx_proxy_companion.nginx_proxy" # backend flask flask: - build: ./backend/flask + build: ./web container_name: flask restart: always volumes: - - ./volume/flask/dyn:/app/dyn + - ./volume/flask/dyn:/app/static/dyn environment: - - APP_NAME=FlaskBackend + - APP_NAME=FlaskAqi - TZ=Asia/Bangkok expose: - "8080" - # backend nginx + # nginx infront of uwsgi nginx: - build: ./backend/nginx + build: ./nginx container_name: backend_nginx restart: always environment: - - VIRTUAL_HOST=data.lpb-air.com - - LETSENCRYPT_HOST=data.lpb-air.com + - VIRTUAL_HOST=www.lpb-air.com,lpb-air.com + - LETSENCRYPT_HOST=www.lpb-air.com,lpb-air.com expose: - "80" + depends_on: + - flask # backend postgres postgres: image: postgres @@ -65,29 +67,3 @@ services: - ./env/postgres.env expose: - "5432" - # php fast-cgi - php: - image: php:7-fpm - container_name: php-fpm - restart: always - volumes: - - ./frontend/nginx:/air - - type: bind - source: ./volume/flask/dyn - target: /air/dyn - # lpb-air - air: - image: nginx - container_name: lpb-air_nginx - restart: always - expose: - - "80" - volumes: - - type: bind - source: ./volume/flask/dyn - target: /air/dyn - - ./frontend/nginx:/air:rw - - ./frontend/nginx.conf:/etc/nginx/conf.d/default.conf - environment: - 
- VIRTUAL_HOST=www.lpb-air.com,lpb-air.com - - LETSENCRYPT_HOST=www.lpb-air.com,lpb-air.com diff --git a/frontend/nginx.conf b/frontend/nginx.conf deleted file mode 100644 index 2114710..0000000 --- a/frontend/nginx.conf +++ /dev/null @@ -1,23 +0,0 @@ -server { - listen 80; - index index.php index.html; - server_name localhost; - error_log /var/log/nginx/error.log; - access_log /var/log/nginx/access.log; - root /air; - - location ~ \.php$ { - try_files $uri =404; - fastcgi_split_path_info ^(.+\.php)(/.+)$; - fastcgi_pass php:9000; - fastcgi_index index.php; - include fastcgi_params; - fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name; - fastcgi_param PATH_INFO $fastcgi_path_info; - } -} - -server { - server_name lpb-air.com; - return 301 https://www.lpb-air.com$request_uri; -} diff --git a/frontend/nginx/about/index.php b/frontend/nginx/about/index.php deleted file mode 100644 index ebc4993..0000000 --- a/frontend/nginx/about/index.php +++ /dev/null @@ -1,92 +0,0 @@ - - - - - - - - About - - - -
- cloud_animation -
-
- -
-
-
-

About

-

This page and its contents are still under construction. More content is coming soon.

-

The data for this page is collected from an air monitor located just outside of Luang Prabang, Laos. While we do our best, no guarantee is given for the accuracy of this data.

-

The data is updated every 3 minutes. Contrary to international websites who measure the air pollution via satellite images and rely on estimates and averages, an on-site air monitor delivers real time values that paint a much more accurate picture of the local situation.

-

Roughly, the Air Quality Index (AQI) is the internationally used air quality standard to measure the pollution of the air. It is divided into 6 levels, and according to these levels, certain health advices are given:

-
-
-
-
-

Aqi Values

-
-
-

Description

-
-
-
-
-

0 to 50:

-
-
-

Good: No health concerns, enjoy activities.

-
-
-
-
-

51 - 100:

-
-
-

Moderate: Active children and adults, and people with respiratory disease, such as asthma, should limit prolonged outdoor exertion.

-
-
-
-
-

101 - 150:

-
-
-

Unhealthy for Sensitive Groups: Active children and adults, and people with respiratory disease, such as asthma, should limit prolonged outdoor exertion.

-
-
-
-
-

151 - 200:

-
-
-

Unhealthy: Everyone may begin to experience health effects: Active children and adults, and people with respiratory disease, such as asthma, should avoid prolonged outdoor exertion; everyone else, especially children, should limit prolonged outdoor exertion

-
-
-
-
-

201 - 300:

-
-
-

Very Unhealthy: Active children and adults, and people with respiratory disease, such as asthma, should avoid all outdoor exertion; everyone else, especially children, should limit outdoor exertion.

-
-
-
-
-

301 - 500:

-
-
-

Hazardous: Everyone should avoid all outdoor exertion.

-
-
-
-
-

Credits

-

Partial Weather data, namely weather icon, weather description and windspeed are provided from openweather.org API distributed under the Creative Commons Attribution-ShareAlike 4.0 Generic License.

-

Lightbox made by Lokesh Dhakar, released under the MIT license.

-
-
- - - \ No newline at end of file diff --git a/frontend/nginx/css/style.css b/frontend/nginx/css/style.css deleted file mode 100644 index 374d778..0000000 --- a/frontend/nginx/css/style.css +++ /dev/null @@ -1,476 +0,0 @@ -@font-face { - font-family: Rubik-Bold; - src: url(/font/Rubik-Bold.ttf); -} - -@font-face { - font-family: Rubik-Light; - src: url(/font/Rubik-Light.ttf); -} - -@font-face { - font-family: Rubik-Regular; - src: url(/font/Rubik-Regular.ttf); -} - -body { - margin: 0; - padding: 0; - overflow: hidden; -} - -h1 { - font-family: Rubik-Bold; - font-size: 3em; -} - -h2 { - font-family: Rubik-Bold; -} - -h3 { - font-family: Rubik-Light; - font-size: 1.3em; -} - -p, li, td, th { - font-family: Rubik-Regular; - font-size: 1.1em; -} - -.preload { - position: fixed; - top: 0; - width: 100%; - height: 100vh; - background: #e6e6e6; - display: flex; - justify-content: center; - align-items: center; - z-index: 1; -} - -.preload img { - width: 100px; -} - -.preload-finish { - opacity: 0; - pointer-events: none; -} - -a { - color: inherit; - font-family: Rubik-Regular; -} - -.content { - width: 70%; - max-width: 900px; - margin-left: auto; - margin-right: auto; -} - -.content-date { - display: inline-block; - min-width: 90px; -} - -.colorbox { - color: #fff; - min-height: 40px; - margin: 0; - padding: 0; - top: 0; - z-index: 1; -} - -.colorbox p { - margin: 0; - padding: 10px 0; -} - -.colorbox ul { - margin: 0; - padding-bottom: 10px; -} - -.top_content { - display: flex; - flex-wrap: nowrap; -} - -.nav { - display: flex; - align-items: center; - flex: 20%; -} - -.nav ul { - list-style-type:none; -} - -.nav li { - padding: 5px 40px; - margin: 5px; - border-style: none none solid none; - border-width: 2px; -} - -.nav a { - text-decoration: none; -} - -.title { - flex: 80%; -} - -.divider { - padding: 30px 0; -} - -.divider hr { - border: 1px solid; -} - -.cloud { - position: relative; - padding-bottom: 30px; -} - -.cloud img { - width: 100%; -} - -.aqi_box { - 
position: absolute; - top: 45%; - left: 57%; - text-align: right; - transform: translate(-50%, -50%); - color: #fff; -} - -.aqi_box h1 { - font-size: 15em; - margin: 0; -} - -.aqi_box h2 { - font-family: Rubik-Light; - font-size: 1.8em; - margin: 0; -} - -.aqi_box p { - margin: 0; -} - -/* weather styling */ -.block_wrap { - margin-bottom: 40px; -} - -.weather_box { - border: solid 2px; - border-radius: 20px; - height: 100px; - position: relative; -} - -.weather_content { - display: grid; - grid-template-columns: auto auto auto auto; - grid-column-gap: 10px; -} - -.weather_icon { - width: 50px; - padding: 10px; - position: absolute; - top: 50%; - transform: translateY(-50%); -} - -.weather_icon img { - width: 100%; -} - -.weather_text { - width: 60%; - position: absolute; - top: 50%; - left: 40%; - transform: translateY(-50%); -} - -.weather_text h3, -.weather_text p { - margin: 10px 0; -} - -/* descriptions */ -.desc_wrap { - margin: 40px 0; - padding: 30px 0; -} - -.desc_content { - padding-top: 30px; - padding-bottom: 30px; - display: grid; - grid-template-columns: 33% 33% 33%; - grid-column-gap: 10px; -} - -.desc_box { - margin: auto; -} - -.desc_item_wrap { - display: flex; - align-items: center; - justify-content: center; - flex-wrap: wrap; -} - -.desc_item { - padding: 8px 10px; - margin: 4px; - color: #fff; - text-align: center; - width: 200px; - transform: scale(1); - transition-timing-function: ease; - -webkit-transition: transform 2s; - -moz-transition: transform 2s; - -ms-transition: transform 2s; - -o-transition: transform 2s; - transition: transform 2s; -} - -.good { - background-color: #85a762; -} -.moderate { - background-color: #d4b93c; -} -.ufsg { - background-color: #e96843; -} -.unhealthy { - background-color: #d03f3b; -} -.vunhealthy { - background-color: #be4173; -} -.hazardous { - background-color: #714261; -} - -.category_icon { - padding: 30px; -} -.category_icon img { - width: 100%; -} - -.tagline_content { - text-align: center; -} - 
-.desc_item.active { - transform: scale(1.2); - font-size: 1.1em; - margin: 15px; - box-shadow: darkgray 5px 5px; - font-weight: bold; -} - -/* table */ -.year-table { - display: flex; - justify-content: center; -} - -.year-table table{ - width: 100%; - max-width: 500px; - table-layout: fixed; - margin: auto; -} - -.year-table thead th { - padding: 5px 0; - background-color: #eeeeee; -} - -.year-table tbody td:nth-child(1) { - padding: 3px 0 3px 10px; - background-color: #eeeeee; -} - -.year-table td:nth-child(2), -.year-table td:nth-child(3), -.year-table td:nth-child(4) { - text-align: center; - color: #fff; -} - -/* about */ -.aqirow { - display: flex; - width: 100%; - height: 100%; - margin: 10px auto; - padding: 0; -} - -.leftcolumn { - width: 150px; - margin: 0; - padding: 10px 15px; - display: flex; -} - -.leftcolumn.category-class { - font-weight: bold; - color: #FFFFFF; -} -.rightcolumn { - width: 100%; - margin: 0px; - padding: 10px; - justify-content: center; -} - -.credits { - padding-top: 30px; - padding-bottom: 30px; -} - -.leftcolumn p, -.rightcolumn p { - margin: 0; - padding: 0; - align-items: center; - display: flex; -} - -/* graphs */ -.graph2 { - display: grid; - grid-template-columns: 50% 50%; - grid-column-gap: 10px; -} - -.graph3 { - display: flex; -} - -.graph_item img { - width: 100%; -} - -/* footer */ -.footer_wrap { - height: 50px; - display: flex; - justify-content: center; - align-items: center; - color: #fff; -} - - -/* responsiv */ -@media screen and (max-width: 1100px) { - .top_content, - .tagline_content, - .weather_content { - width: 90%; - } - .desc_content { - grid-template-columns: repeat(2, 1fr); - } - .desc_box:nth-child(1) { - grid-column: span 1; - grid-row: span 2; - } - .desc_box:nth-child(2), - .desc_box:nth-child(3) { - grid-column: span 1; - grid-row: span 1; - } -} - -@media screen and (max-width: 800px) { - h1 { - font-size: 2.5em; - } - .top_content { - flex-direction: column-reverse; - } - .content { - width: 95%; 
- } - .nav { - flex-wrap: nowrap; - } - .nav li { - display: inline-flex; - padding: 5px; - } - .nav ul { - margin: 10px auto; - padding: 0; - } - .title h1 { - margin: 0; - } - .title { - padding: 0; - } - .aqi_box { - width: 85%; - top: 40%; - text-align: center; - } - .aqi_box h1 { - font-size: 5em; - } - .aqi_box h2 { - font-size: 1.5em; - } - .weather_content { - grid-template-columns: auto auto; - grid-row-gap: 10px; - } - .weather_icon { - padding: 5px; - } - .weather_icon img { - width: 100%; - } - .weather_text { - left: 35%; - } - .weather_text h3 { - font-size: 1.2; - } - .desc_content { - display: block; - } - .desc_item { - padding: 0 10px; - width: 50%; - } - .category_icon { - padding: 0 20%; - } - .graph2 { - display: block; - } - .graph3 { - flex-direction: column-reverse; - } -} \ No newline at end of file diff --git a/frontend/nginx/graphs/index.php b/frontend/nginx/graphs/index.php deleted file mode 100644 index 456c0cf..0000000 --- a/frontend/nginx/graphs/index.php +++ /dev/null @@ -1,147 +0,0 @@ - - - - - - - - - Graphs - - - - -
- cloud_animation -
-
- -
-
-
-

Graphs

-

All the graphs and table on this page will get recreated every night with the newest values.

-
-
-

Last three days

-
-
-
-

Three days ago

- - day-3 - -
-
-

Two days ago

- - day-2 - -
-
-

Yesterday

- - day-1 - -
-
-
-
-
-
-

Particle Matter sizes

-

There is no healthy level of pollution. Particle matter (PM) are defined in two different sizes: PM 2.5 which represents particle sizes smaller than 2.5 µm or less than 1/20th of the diameter of a human hair and PM 10 which represents particle sizer smaller than 10 µm or 1/5th of the diameter of a human hair.

-

The WHO is providing more details on their website regarding particle matter and their health implications. On Wikipedia there are some interesting links to studies for further reading.

-
-
-
- - pm 2.5 bar chart - -
-
-

PM 2.5

-

Particle matter sizes smaller than 2.5µm are the most problematic as these particles will find their way through the lungs into the bloodstream.

-

The WHO Air quality guideline values set a 25 µg/m³ 24-hour average as an upper level threshold. In the 10 days overview you can see:

-

Green: Daily average exposure below 25 µg/m³
- Red: Daily average exposure above 25 µg/m³

-
-
-
-
- - pm 10 bar chart - -
-
-

PM 10

-

The threshold for the daily average PM 10 exposure is set to 50 µg/m³ by the WHO. Particles this size can penetrate and lodge deep inside the lungs but are too big to enter the blood stream. For this reason the threshold is higher.

-

In the 10 days overview you can see:

-

Green: Daily average exposure below 50 µg/m³
- Red: Daily average exposure above 50 µg/m³

-
-
-
-
-
-
-

Hour by Hour

-
-
-
- - hours 10 bar chart - -
-
-

Hourly AQI average

-

The AQI value can change a lot during the day. This can depend on the wind, cooking on fire or just another additional source of pollution nearby.

-

In this chart you can see the average AQI for each hour. This data is based on the last three days. This data can help to visualize which hours in the past three days have been better or worse on average.

-
-
-
-
-
-
-

Compared to last year

-

This year's daily average AQI values from last 7 days compared to corresponding values from last year.

-
-
-
- - - - - - - - - - - - '; - foreach($row as $cell) { - echo ''; - } - echo ''; - } - ?> - - -
this yearlast yearchange
' . $cell . '
-
-
- - last year comparison - -
-
-
- - - \ No newline at end of file diff --git a/frontend/nginx/incl/footer.html b/frontend/nginx/incl/footer.html deleted file mode 100644 index 7035721..0000000 --- a/frontend/nginx/incl/footer.html +++ /dev/null @@ -1,5 +0,0 @@ - \ No newline at end of file diff --git a/frontend/nginx/incl/topnav.php b/frontend/nginx/incl/topnav.php deleted file mode 100644 index f20acdb..0000000 --- a/frontend/nginx/incl/topnav.php +++ /dev/null @@ -1,18 +0,0 @@ -
- -
-
-
-

Live Air Quality

-

in Luang Prabang Laos PDR

-

Last updated:

-
- -
\ No newline at end of file diff --git a/frontend/nginx/index.php b/frontend/nginx/index.php deleted file mode 100644 index 718c655..0000000 --- a/frontend/nginx/index.php +++ /dev/null @@ -1,129 +0,0 @@ - - - - - - - - - AQI - - - - - - - - - -
- cloud_animation -
-
- -
-
- cloud -
-

-

US AQI

-

-
-
-
-
-
-
-
-
- weather_icon -
-
-

°C

-

-
-
-
-
- wind_icon -
-
-

Wind

-

km/h

-
-
-
-
- humidity_icon -
-
-

Humidity

-

%

-
-
-
-
- pressure_icon -
-
-

Pressure

-

mbar

-
-
-
-
-
-
-
-
-
-

GOOD

-
-
-

MODERATE

-
-
-

UNHEALTHY FOR SENSITIVE GROUPS

-
-
-

UNHEALTHY

-
-
-

VERY UNHEALTHY

-
-
-

HAZARDOUS

-
-
-
-
-
- category_icon -
-
-
-

-

-

Good: No health concerns, enjoy activities.

-
-
-
-
-
-
-

Last three hours

- - current - -
-
-

Last 7 days

- - last-7 days - -
-
-
- - - \ No newline at end of file diff --git a/frontend/nginx/monthly/index.php b/frontend/nginx/monthly/index.php deleted file mode 100644 index 788127b..0000000 --- a/frontend/nginx/monthly/index.php +++ /dev/null @@ -1,56 +0,0 @@ - - - - - - - - - Monthly - - - - -
- cloud_animation -
-
- -
-
-
-

Month by month

-

Month compared to last year. Values are in 8h average.

-
- - format('F Y'); - echo '

'.$date_str.'

'; - echo '
'; - echo ''; - echo '
'; - echo ''; - echo ''; - foreach($rows as $row) { - echo ''; - foreach($row as $cell) { - echo ''; - } - echo ''; - } - echo ''; - echo '
this yearlast yearchange
' . $cell . '
'; - echo '
'; - } - ?> - -
- - \ No newline at end of file diff --git a/backend/nginx/Dockerfile b/nginx/Dockerfile similarity index 79% rename from backend/nginx/Dockerfile rename to nginx/Dockerfile index e60bd5b..137f097 100644 --- a/backend/nginx/Dockerfile +++ b/nginx/Dockerfile @@ -5,4 +5,4 @@ FROM nginx RUN rm /etc/nginx/conf.d/default.conf # Replace with our own nginx.conf -COPY nginx.conf /etc/nginx/conf.d/ +COPY nginx.conf /etc/nginx/conf.d/ \ No newline at end of file diff --git a/backend/nginx/nginx.conf b/nginx/nginx.conf similarity index 100% rename from backend/nginx/nginx.conf rename to nginx/nginx.conf diff --git a/web/Dockerfile b/web/Dockerfile new file mode 100644 index 0000000..ddbe741 --- /dev/null +++ b/web/Dockerfile @@ -0,0 +1,19 @@ +FROM python + +COPY . /srv/flask_app +WORKDIR /srv/flask_app + +RUN mkdir -p static/dyn/monthly +RUN chmod 777 -R . + +RUN apt-get clean && apt-get -y update + +RUN apt-get -y install python3-dev \ + && apt-get -y install build-essential + +RUN pip install -r requirements.txt --src /usr/local/src + + +RUN chmod +x ./start.sh + +CMD ["./start.sh"] diff --git a/web/config.json.sample b/web/config.json.sample new file mode 100644 index 0000000..cadc187 --- /dev/null +++ b/web/config.json.sample @@ -0,0 +1,17 @@ +{ + "aqi_monitor": { + "authUsername": "user", + "authPassword": "password" + }, + "openweathermap": { + "api_key": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + "lat": 40.71, + "lon": -74.00 + }, + "postgres": { + "db_host": "postgres", + "db_database": "aqi", + "db_user": "aqi", + "db_password": "xxxxxxxxxxxxxxxxxxxx" + } +} \ No newline at end of file diff --git a/backend/flask/requirements.txt b/web/requirements.txt similarity index 63% rename from backend/flask/requirements.txt rename to web/requirements.txt index 2cbff17..46e4a68 100644 --- a/backend/flask/requirements.txt +++ b/web/requirements.txt @@ -1,6 +1,7 @@ -apscheduler +APScheduler +Flask_HTTPAuth +Flask_Table Flask -Flask-HTTPAuth matplotlib numpy pandas diff --git 
# web/src/db.py -- db connections, ingest enrichment, weather cache.
# Module-level imports at the top of the file: json, datetime,
# psycopg2, requests, src.helper.get_config.


class DatabaseConnect:
    """ Thin wrapper around one psycopg2 connection + cursor pair. """

    def __init__(self):
        # read config at instantiation instead of import time, so that
        # importing this module has no filesystem side effects
        self.config = get_config()
        self.conn, self.cur = self.db_connect()

    def db_connect(self):
        """ Return (connection, cursor) built from the postgres config. """
        conn = psycopg2.connect(
            host=self.config['postgres']['db_host'],
            database=self.config['postgres']['db_database'],
            user=self.config['postgres']['db_user'],
            password=self.config['postgres']['db_password']
        )
        cur = conn.cursor()
        return conn, cur

    def db_execute(self, query):
        """ Run a query.

        str: plain SELECT, returns all fetched rows.
        tuple (sql, params): parameterized statement, returns False.
        Anything else now raises ValueError instead of the previous
        UnboundLocalError on the return line.
        """
        if isinstance(query, str):
            self.cur.execute(query)
            rows = self.cur.fetchall()
        elif isinstance(query, tuple):
            self.cur.execute(query[0], query[1])
            rows = False
        else:
            raise ValueError(f'unexpected query type: {type(query)}')

        return rows

    def db_close(self):
        """ Commit and cleanly close cursor and connection. """
        self.conn.commit()
        self.cur.close()
        self.conn.close()


class IngestLine:
    """ Enrich one json line from the monitor and build insert queries. """

    # EPA PM2.5 breakpoints: (category, pm_low, pm_high, aqi_low, aqi_high)
    PM25_BREAKPOINTS = [
        ('Good', 0, 12.0, 0, 50),
        ('Moderate', 12.1, 35.4, 51, 100),
        ('Unhealthy for Sensitive Groups', 35.5, 55.4, 101, 150),
        ('Unhealthy', 55.5, 150.4, 151, 200),
        ('Very Unhealthy', 150.5, 250.4, 201, 300),
        ('Hazardous', 250.5, 500.4, 301, 500),
    ]

    def __init__(self, data):
        self.aqi_query = None
        self.weather_query = None
        self.input_json = data
        self.add_aqi()
        self.add_timestamp()
        self.add_weather()
        self.add_query()

    def add_aqi(self):
        """ Add aqi_value and aqi_category keys from the pm2.5 value.

        Fixed: the old strict comparison (p_low < pm25 < p_high) missed
        breakpoint edges (e.g. exactly 35.5) and the rounding gaps
        between brackets (e.g. 12.05); those values fell through the
        loop and were computed with the stale 'Hazardous' loop
        variables. Matching pm25 <= pm_high with the brackets in
        ascending order closes both holes.
        """
        pm25 = self.input_json['pm25']
        # default to the top bracket for off-the-scale readings
        aqi_category, p_low, p_high, a_low, a_high = self.PM25_BREAKPOINTS[-1]
        for bracket in self.PM25_BREAKPOINTS:
            if pm25 <= bracket[2]:
                aqi_category, p_low, p_high, a_low, a_high = bracket
                break

        # linear interpolation inside the matched bracket
        aqi = (a_high - a_low) / (p_high - p_low) * (pm25 - p_low) + a_low

        self.input_json.update({
            'aqi_value': round(aqi),
            'aqi_category': aqi_category
        })

    def add_timestamp(self):
        """ Add human readable and epoch timestamps to the dict. """
        now = datetime.now()
        self.input_json.update({
            'time_stamp': now.strftime("%Y-%m-%d %H:%M:%S"),
            'epoch_time': int(now.strftime('%s'))
        })

    def add_weather(self):
        """ Merge the current (possibly cached) weather into the dict. """
        self.input_json.update(Weather().last_weather)

    def add_query(self):
        """ Build and store the aqi and weather insert queries. """
        aqi_keys = (
            'epoch_time', 'sensor_id', 'time_stamp', 'uptime',
            'pm25', 'pm10', 'aqi_value', 'aqi_category'
        )
        weather_keys = (
            'epoch_time', 'sensor_id', 'time_stamp', 'temperature',
            'pressure', 'humidity', 'wind_speed', 'wind_direction',
            'weather_name', 'weather_icon'
        )
        self.aqi_query = self.build_query(aqi_keys, 'aqi')
        self.weather_query = self.build_query(weather_keys, 'weather')

    def build_query(self, keys, table):
        """ Stitch a parameterized (sql, values) pair for psycopg2. """
        keys_str = ', '.join(keys)
        placeholders = ', '.join(['%s'] * len(keys))
        values = tuple(self.input_json[key] for key in keys)

        return (
            f'INSERT INTO {table} ({keys_str}) VALUES ({placeholders});',
            values
        )


class Weather:
    """ Cached weather lookup against the openweathermap API. """

    CACHE_FILE = 'static/dyn/weather.json'

    def __init__(self):
        # per-instance config read avoids import-time file access
        self.config = get_config()
        self.epoch_time = int(datetime.now().strftime('%s'))
        self.last_weather = self.get_weather()

    def get_weather(self):
        """ Return cached weather, refreshing when older than 10 min. """
        try:
            last_dict = self.get_cache()
        except FileNotFoundError:
            # first run: no cache on disk yet
            last_dict = self.get_openweather()
        last_epoch = last_dict['epoch_time']

        if self.epoch_time - last_epoch > 10 * 60:
            print('get new weather data')
            weather = self.get_openweather()
        else:
            print('reuse weather data')
            weather = last_dict

        del weather['epoch_time']

        return weather

    def get_openweather(self):
        """ Fetch current weather from openweathermap and cache it. """
        api_key = self.config['openweathermap']['api_key']
        lat = self.config['openweathermap']['lat']
        lon = self.config['openweathermap']['lon']

        url = ('https://api.openweathermap.org/data/2.5/weather' +
               f'?&units=metric&appid={api_key}&lat={lat}&lon={lon}')
        resp = requests.get(url, timeout=20).json()
        weather = {
            'weather_name': resp['weather'][0]['main'],
            'weather_icon': resp['weather'][0]['icon'],
            'wind_speed': resp['wind']['speed'],
            'wind_direction': resp['wind']['deg'],
            'epoch_time': self.epoch_time
        }
        self.write_cache(weather)

        return weather

    @classmethod
    def get_cache(cls):
        """ Read the last stored weather dict from disk. """
        with open(cls.CACHE_FILE, 'r', encoding='utf-8') as f:
            return json.loads(f.read())

    @classmethod
    def write_cache(cls, weather):
        """ Persist the weather dict to the on-disk cache. """
        with open(cls.CACHE_FILE, 'w', encoding='utf-8') as f:
            f.write(json.dumps(weather))


def get_current():
    """ Return the most recent aqi + weather rows as a json string. """
    db_handler = DatabaseConnect()
    aqi = db_handler.db_execute(
        'SELECT time_stamp, aqi_value, aqi_category \
        FROM aqi ORDER BY epoch_time DESC LIMIT 1;'
    )
    weather = db_handler.db_execute(
        'SELECT temperature, pressure, humidity, \
        wind_speed, weather_name, weather_icon \
        FROM weather ORDER BY epoch_time DESC LIMIT 1;'
    )
    db_handler.db_close()

    json_dict = {
        "temperature": weather[0][0],
        "pressure": weather[0][1],
        "humidity": weather[0][2],
        "weather_name": weather[0][4],
        "weather_icon": weather[0][5],
        "timestamp": aqi[0][0],
        "aqi_value": aqi[0][1],
        "aqi_category": aqi[0][2],
        "wind_speed": weather[0][3]
    }
    return json.dumps(json_dict)


def insert_data(data):
    """ Called from the ingest route to make both db inserts. """
    ingest = IngestLine(data)

    db_handler = DatabaseConnect()
    _ = db_handler.db_execute(ingest.aqi_query)
    _ = db_handler.db_execute(ingest.weather_query)
    db_handler.db_close()
# web/src/graph_monthly.py -- monthly graph + comparison table export.
# Module-level imports at the top of the file: json, os.path, datetime,
# numpy as np, pandas as pd, matplotlib.pyplot as plt, src.db, src.helper.


class MonthStatus:
    """ Figure out which month to render and whether it already exists. """

    def __init__(self):
        self.m_stamp, self.y_stamp = (None, None)
        self.get_epoch()
        self.found = self.check_needed()

    def get_epoch(self):
        """ Build (start, end) epoch pairs for last month and for the
        same month one year earlier. """
        now = datetime.now()
        # last month: one second before the 1st of the current month
        m_end = datetime(now.year, now.month, day=1) - timedelta(seconds=1)
        m_start = datetime(m_end.year, m_end.month, day=1)
        self.m_stamp = (
            int(m_start.strftime('%s')), int(m_end.strftime('%s'))
        )
        # same window shifted back one year
        y_now = now.replace(year=now.year - 1)
        y_end = datetime(y_now.year, y_now.month, day=1) - timedelta(seconds=1)
        y_start = datetime(y_end.year, y_end.month, day=1)
        self.y_stamp = (
            int(y_start.strftime('%s')), int(y_end.strftime('%s'))
        )

    def check_needed(self):
        """ True when the png for last month is already on disk. """
        file_name = datetime.fromtimestamp(self.m_stamp[0]).strftime('%Y-%m')
        file_path = path.join('static/dyn/monthly', file_name + '.png')
        return path.isfile(file_path)


class MonthGenerator(MonthStatus):
    """ Create the monthly graph png and the json comparison table. """

    def __init__(self):
        super().__init__()
        self.m_rows, self.y_rows = self.get_data()
        self.axis = self.build_axis()

    def get_data(self):
        """ Export last month and last year's month from postgres. """
        m_query = ('SELECT epoch_time, aqi_value FROM aqi WHERE '
                   f'epoch_time > {self.m_stamp[0]} AND '
                   f'epoch_time < {self.m_stamp[1]} '
                   'ORDER BY epoch_time DESC;')
        y_query = ('SELECT epoch_time, aqi_value FROM aqi WHERE '
                   f'epoch_time > {self.y_stamp[0]} AND '
                   f'epoch_time < {self.y_stamp[1]} '
                   'ORDER BY epoch_time DESC;')
        db_handler = DatabaseConnect()
        m_rows = db_handler.db_execute(m_query)
        y_rows = db_handler.db_execute(y_query)
        db_handler.db_close()
        return m_rows, y_rows

    @staticmethod
    def _resample(rows, column):
        """ Build an 8h-mean frame from (epoch, aqi) rows, indexed by
        '%d %H:%M' strings so both years align on day-of-month. """
        timeline = [datetime.fromtimestamp(row[0]) for row in rows]
        values = [int(row[1]) for row in rows]
        frame = pd.DataFrame({'timestamp': timeline, column: values})
        indexed = frame.set_index('timestamp')
        indexed.sort_index(inplace=True)
        mean = indexed.resample('8h').mean().round()
        mean.reset_index(level=0, inplace=True)
        mean['timestamp'] = mean['timestamp'].dt.strftime('%d %H:%M')
        mean.set_index('timestamp', inplace=True)
        return mean

    def build_axis(self):
        """ Merge this month and last year into one aligned axis dict. """
        mean = self._resample(self.m_rows, 'now_aqi')
        y_mean = self._resample(self.y_rows, 'year_aqi')
        # merge the two on the shared day/time index
        mean['year_aqi'] = y_mean['year_aqi']
        mean.reset_index(level=0, inplace=True)
        mean.sort_values(by='timestamp', ascending=True, inplace=True)
        return {
            'x': mean['timestamp'],
            'y_1': mean['now_aqi'],
            'y_2': mean['year_aqi']
        }

    def write_plt(self):
        """ Write the monthly comparison plot to disk. """
        x = self.axis['x']
        y_1 = self.axis['y_1']
        y_2 = self.axis['y_2']
        # parse timestamp of the month being rendered
        date_month = datetime.fromtimestamp(self.m_rows[-1][0]).date()
        date_title = date_month.strftime('%b %Y')
        date_file = date_month.strftime('%Y-%m')
        month_short = date_month.strftime('%b')
        file_name = 'static/dyn/monthly/' + date_file + '.png'
        # build ticks; pd.concat replaces Series.append (removed in pandas 2)
        y_max = np.ceil(pd.concat([y_1, y_2]).max() / 50) * 50 + 50
        x_range = np.arange(0, len(x), step=9)
        last_day = int(x.max().split()[0])
        x_numbers = np.arange(1, last_day + 1, step=3)
        x_dates = [f'{str(i).zfill(2)} {month_short}' for i in x_numbers]
        # plot
        plt.style.use('seaborn')
        plt.plot(x, y_1, color='#313131', label='this year')
        plt.plot(
            x, y_2, color='#666666', linestyle='dashed', label='last year'
        )
        # fill colors
        plt_fill(plt, x, y_1)
        plt.xticks(x_range, x_dates)
        plt.yticks(np.arange(0, y_max, step=50))
        plt.title(f'AQI values for: {date_title}', fontsize=20)
        plt.legend()
        plt.tight_layout()
        plt.savefig(file_name, dpi=300)
        plt.figure()
        # release figure memory (previously leaked one figure per run)
        plt.close('all')

    @staticmethod
    def get_aqi(val):
        """ Map an AQI number to its category name.

        Fixed: a value of exactly 0 used to fall through the loop and
        come back as 'Hazardous'; brackets are now matched on the
        upper bound only, in ascending order.
        """
        breakpoints = [
            ('Good', 50),
            ('Moderate', 100),
            ('Unhealthy for Sensitive Groups', 150),
            ('Unhealthy', 200),
            ('Very Unhealthy', 300),
            ('Hazardous', 500),
        ]
        for category, max_val in breakpoints:
            if val <= max_val:
                return category
        # above scale
        return 'Hazardous'

    @staticmethod
    def get_change(m_val, y_val):
        """ Classify the relative change on a 15% threshold.

        Guards against division by zero when this month's value is 0.
        """
        if not m_val:
            return 'same'
        diff_avg = (m_val - y_val) / m_val
        if diff_avg <= -0.15:
            return 'down'
        if diff_avg >= 0.15:
            return 'up'
        return 'same'

    def write_table(self):
        """ Write the json file with monthly min/max/avg details. """
        date_month = datetime.fromtimestamp(self.m_rows[-1][0]).date()
        date_file = date_month.strftime('%Y-%m')
        file_name = 'static/dyn/monthly/' + date_file + '.json'
        # current month
        m_min = int(self.axis['y_1'].min())
        m_max = int(self.axis['y_1'].max())
        m_avg = int(self.axis['y_1'].mean())
        m_cat = self.get_aqi(m_avg)
        # last year
        y_min = int(self.axis['y_2'].min())
        y_max = int(self.axis['y_2'].max())
        y_avg = int(self.axis['y_2'].mean())
        y_cat = self.get_aqi(y_avg)
        monthly_dict = {
            'data': [
                ['min: ', m_min, y_min, self.get_change(m_min, y_min)],
                ['max: ', m_max, y_max, self.get_change(m_max, y_max)],
                ['avg: ', m_avg, y_avg, self.get_change(m_avg, y_avg)],
                ['avg aqi: ', m_cat, y_cat, self.get_change(m_avg, y_avg)]
            ]
        }
        with open(file_name, 'w', encoding='utf-8') as f:
            f.write(json.dumps(monthly_dict))


def main():
    """ Export the monthly graph and table json, skipping when done. """
    month_status = MonthStatus()
    if month_status.found:
        print('monthly already created, skipping...')
        return

    print('creating monthly graph and json file')
    month_generator = MonthGenerator()
    month_generator.write_plt()
    month_generator.write_table()
# web/src/graph_nightly.py -- nightly graph exports.
# Module-level imports at the top of the file: json, datetime, numpy as np,
# pandas as pd, scipy, matplotlib.pyplot as plt, src.db, src.helper.


class NightlyPlots:
    """ Fetch nightly data once and fan out to the individual plots. """

    # NOTE(review): the former CONFIG = get_config() class attribute was
    # unused by this module and read config.json at import time; removed.

    def __init__(self):
        self.now = datetime.now()
        print('get data from db')
        self.rows, self.y_rows = self.get_data()

    @staticmethod
    def color_colums(y):
        """ Map each AQI value to its category color.

        Fixed: values of exactly 0 (and values above 500) previously got
        no color appended, so the color list could end up shorter than
        the bar list and misalign every following bar. Brackets are now
        matched on the upper bound, with an above-scale fallback.
        """
        breakpoints = [
            ('#85a762', 50),   # good
            ('#d4b93c', 100),  # moderate
            ('#e96843', 150),  # unhealthy for sensitive groups
            ('#d03f3b', 200),  # unhealthy
            ('#be4173', 300),  # very unhealthy
            ('#714261', 500),  # hazardous
        ]
        colors = []
        for value in y:
            for color, max_val in breakpoints:
                if value <= max_val:
                    colors.append(color)
                    break
            else:
                # above scale: treat as hazardous
                colors.append('#714261')
        return colors

    def get_data(self):
        """ Export last 10 days and the same window last year. """
        day_until = int(self.now.date().strftime('%s'))
        day_from = day_until - 10 * 24 * 60 * 60
        query = ('SELECT epoch_time, aqi_value, pm25, pm10 FROM aqi WHERE '
                 f'epoch_time > {day_from} AND epoch_time < {day_until} '
                 'ORDER BY epoch_time DESC;')
        y_until = day_until - 365 * 24 * 60 * 60
        y_from = y_until - 10 * 24 * 60 * 60
        y_query = ('SELECT epoch_time, aqi_value FROM aqi WHERE '
                   f'epoch_time > {y_from} AND epoch_time < {y_until} '
                   'ORDER BY epoch_time DESC;')
        db_handler = DatabaseConnect()
        rows = db_handler.db_execute(query)
        y_rows = db_handler.db_execute(y_query)
        db_handler.db_close()

        return rows, y_rows

    def recreate_last_7(self):
        """ Rebuild the last-seven-days plot. """
        day_until = int(self.now.date().strftime('%s'))
        day_from = day_until - 7 * 24 * 60 * 60
        rows = [i for i in self.rows if day_from < i[0] < day_until]
        date_from = datetime.fromtimestamp(day_from).strftime('%d %b')
        date_until = datetime.fromtimestamp(day_until).strftime('%d %b')
        plt_title = f'AQI values from: {date_from} until {date_until}'
        _ = LastSevenDays(rows, plt_title)

    def recreate_last_3(self):
        """ Rebuild the three per-day plots. """
        _ = LastThreeDays(self.rows, self.now)

    def recreate_pm_chart(self):
        """ Rebuild the pm2.5 and pm10 bar charts. """
        _ = PmGraphs(self.rows)

    def recreate_hour_bar(self):
        """ Rebuild the hourly average bar chart (last three days). """
        day_until = int(self.now.date().strftime('%s'))
        day_from = day_until - 3 * 24 * 60 * 60
        rows = [i for i in self.rows if day_from < i[0] < day_until]
        _ = HourBar(rows)

    def recreate_year_comparison(self):
        """ Rebuild the year-on-year chart and table json. """
        _ = YearComparison(self.rows, self.y_rows)


class LastSevenDays:
    """ Recreate the last-seven-days plot with a daily average overlay. """

    FILENAME = 'static/dyn/last-7.png'

    def __init__(self, rows, plt_title):
        print('recreating last seven days')
        self.plt_title = plt_title
        self.rows = rows
        self.axis = self.build_axis()
        self.write_plt()

    def build_axis(self):
        """ Build 2h means plus a smoothed daily average series. """
        x_timeline = [datetime.fromtimestamp(i[0]) for i in self.rows]
        y_aqi_values = [int(i[1]) for i in self.rows]
        df = pd.DataFrame({'timestamp': x_timeline, 'aqi': y_aqi_values})
        indexed = df.set_index('timestamp')
        indexed.sort_index(inplace=True)
        mean = indexed.resample('2h').mean()
        mean['avg'] = mean['aqi'].resample('1d').mean()
        # center the daily average on midday
        mean['avg'] = mean.avg.shift(6)
        # anchor first and last point; .iloc avoids the removed chained
        # positional assignment mean['avg'][0] = ...
        avg_col = mean.columns.get_loc('avg')
        mean.iloc[0, avg_col] = (
            mean['avg'].iloc[6] + mean['aqi'].iloc[0]
        ) / 2
        mean.iloc[-1, avg_col] = (
            mean['avg'].iloc[-6] + mean['aqi'].iloc[-1]
        ) / 2
        # smooth the daily average
        mean['avg'].interpolate(method='polynomial', order=3, inplace=True)
        mean.reset_index(level=0, inplace=True)
        mean['timestamp'] = mean['timestamp'].dt.strftime('%Y-%m-%d %H:%M')
        mean['aqi'] = mean['aqi'].round()
        mean['avg'] = mean['avg'].round()
        # ticks: one label per day
        x_range = np.arange(0, 84, step=12)
        x_date_time = pd.to_datetime(mean['timestamp']).dt.date.unique()
        x_dates = np.asarray([i.strftime('%d %b') for i in x_date_time])
        return {
            "x": mean['timestamp'],
            "y_1": mean['aqi'],
            "y_2": mean['avg'],
            "x_ticks": (x_range, x_dates),
            "plt_title": self.plt_title
        }

    def write_plt(self):
        """ Write the last-7-days plot to disk. """
        x = self.axis['x']
        y_1 = self.axis['y_1']
        y_2 = self.axis['y_2']
        x_ticks = self.axis['x_ticks']
        # pd.concat replaces Series.append (removed in pandas 2)
        y_max = np.ceil(pd.concat([y_1, y_2]).max() / 50) * 50 + 50
        plt.style.use('seaborn')
        plt.plot(x, y_1, color='#313131', label='2hour avg')
        plt.plot(x, y_2, color='#cc0000', label='daily avg')
        plt_fill(plt, x, y_1)
        plt.xticks(x_ticks[0], x_ticks[1])
        plt.yticks(np.arange(0, y_max, step=50))
        plt.title(self.axis['plt_title'], fontsize=20)
        plt.legend()
        plt.tight_layout()
        plt.savefig(self.FILENAME, dpi=300)
        plt.figure()
        plt.close('all')


class LastThreeDays:
    """ Recreate the three single-day plots with a shared y scale. """

    def __init__(self, rows, now):
        print('recreating last three days')
        self.y_max = None
        self.now = now
        self.rows = rows
        self.rebuild_last_three()

    def rebuild_last_three(self):
        """ Build all three axes first so the y max can be shared. """
        all_axis = [self.get_axis(day) for day in range(1, 4)]
        self.y_max = max(max(axis['y']) for axis in all_axis) + 50
        for idx, axis in enumerate(all_axis):
            self.write_plt(axis, idx + 1)

    def get_axis(self, day):
        """ Build the axis dict for the day N days back. """
        day_delta = self.now.date() - timedelta(days=day)
        day_from = int(day_delta.strftime('%s'))
        day_until = day_from + 60 * 60 * 24
        day_rows = [i for i in self.rows if day_from < i[0] < day_until]
        time_stamp = day_delta.strftime('%Y-%m-%d')
        x_timeline = [datetime.fromtimestamp(i[0]) for i in day_rows]
        y_aqi_values = [int(i[1]) for i in day_rows]
        df = pd.DataFrame({'timestamp': x_timeline, 'aqi': y_aqi_values})
        indexed = df.set_index('timestamp')
        indexed.sort_index(inplace=True)
        mean = indexed.resample('15min').mean()
        mean.interpolate(
            method='linear', limit=1, inplace=True, limit_area='inside'
        )
        mean.reset_index(level=0, inplace=True)
        mean['timestamp'] = mean['timestamp'].dt.strftime('%H:%M')
        mean['aqi'] = mean['aqi'].round()
        return {
            "x": mean['timestamp'],
            "y": mean['aqi'],
            "x_ticks": np.arange(0, 97, step=8),
            "plt_title": f'AQI values from: {time_stamp}'
        }

    def write_plt(self, axis, day):
        """ Write one daily plot to disk. """
        plt.style.use('seaborn')
        plt.plot(axis['x'], axis['y'], color='#313131',)
        plt_fill(plt, axis['x'], axis['y'])
        # reuse the ticks already computed on the axis dict
        plt.xticks(axis['x_ticks'])
        plt.yticks(np.arange(0, self.y_max, step=50))
        plt.title(axis['plt_title'], fontsize=20)
        plt.tight_layout()
        plt.savefig(f'static/dyn/day-{day}.png', dpi=300)
        plt.figure()
        plt.close('all')


class PmGraphs:
    """ Recreate the daily average pm10 and pm2.5 exposure bar charts. """

    def __init__(self, rows):
        print('recreating pm bar charts')
        self.rows = rows
        self.y_max = None
        self.axis = self.get_axis()
        self.write_plt(thresh=25, title='2.5')
        self.write_plt(thresh=50, title='10')

    def get_axis(self):
        """ Build daily means for both particle sizes. """
        x_timeline = [datetime.fromtimestamp(i[0]) for i in self.rows]
        y_pm25_values = [int(i[2]) for i in self.rows]
        y_pm10_values = [int(i[3]) for i in self.rows]
        df = pd.DataFrame({
            'timestamp': x_timeline,
            'pm25': y_pm25_values,
            'pm10': y_pm10_values
        })
        indexed = df.set_index('timestamp')
        indexed.sort_index(inplace=True)
        mean = indexed.resample('1d').mean()
        mean.reset_index(level=0, inplace=True)
        axis = {
            'x': mean['timestamp'],
            'y_pm25': mean['pm25'].round(),
            'y_pm10': mean['pm10'].round()
        }
        # shared y max; pd.concat replaces Series.append (pandas 2)
        self.y_max = np.ceil(
            pd.concat([axis['y_pm25'], axis['y_pm10']]).max() / 25
        ) * 25 + 25
        return axis

    def write_plt(self, thresh, title):
        """ Write one pm bar chart, colored by the WHO threshold. """
        file_name = title.replace('.', '')
        plt_title = f'Daily avg PM {title} exposure'
        x = self.axis['x']
        y = self.axis['y_pm' + file_name]
        x_range = np.arange(10).tolist()
        x_date_time = pd.to_datetime(x).dt.date.unique()
        x_dates = [i.strftime('%d %b') for i in x_date_time]
        # green below the threshold, red above
        col = ['#6ecd65' if val < thresh else '#ff4d4d' for val in y]
        plt.style.use('seaborn')
        plt.bar(x_dates, y, color=col, width=0.5)
        plt.axhline(y=thresh, color='#6ecd65', linestyle=':')
        plt.xticks(ticks=x_range, labels=x_dates)
        plt.yticks(np.arange(0, self.y_max, step=25))
        plt.title(plt_title, fontsize=20)
        plt.tight_layout()
        plt.savefig(f'static/dyn/pm{file_name}.png', dpi=300)
        plt.close('all')
        plt.figure()


class HourBar:
    """ Recreate the hour-by-hour average bar chart. """

    def __init__(self, rows):
        print('recreating hour avg bar chart')
        self.rows = rows
        self.axis = self.get_axis()
        self.write_plt()

    def get_axis(self):
        """ Average all rows into one value per hour of day. """
        x_timeline = [datetime.fromtimestamp(i[0]) for i in self.rows]
        y_aqi_values = [int(i[1]) for i in self.rows]
        df = pd.DataFrame({'timestamp': x_timeline, 'aqi': y_aqi_values})
        indexed = df.set_index('timestamp')
        indexed.sort_index(inplace=True)
        mean = indexed.resample('1h').mean()
        # regroup by hour of day across all days
        mean_hour = mean.groupby([mean.index.hour]).mean()
        mean_hour.reset_index(level=0, inplace=True)
        return {
            'x': mean_hour['timestamp'],
            'y': mean_hour['aqi'].round()
        }

    def write_plt(self):
        """ Write the hour bar chart to disk. """
        plt_title = 'Last three days average AQI for each hour'
        x = self.axis['x']
        y = self.axis['y']
        x_range = np.arange(0, 24, step=3)
        x_hours = [str(i).zfill(2) + ":00" for i in x_range]
        y_max = np.ceil(max(y) / 50) * 50 + 50
        col = NightlyPlots.color_colums(y)
        plt.style.use('seaborn')
        plt.bar(x, y, color=col, width=0.5)
        plt.yticks(np.arange(0, y_max, step=50))
        plt.xticks(ticks=x_range, labels=x_hours)
        plt.title(plt_title, fontsize=20)
        plt.tight_layout()
        plt.savefig('static/dyn/hours.png', dpi=300)
        plt.close('all')
        plt.figure()


class YearComparison:
    """ Export the year-on-year graph and table json. """

    def __init__(self, rows, y_rows):
        print('recreating year comparison')
        self.rows = rows
        self.y_rows = y_rows
        self.axis = self.get_axis()
        self.write_table()
        self.write_plt()

    def get_axis(self):
        """ Merge daily means of both years and classify the change. """
        # this year
        x_timeline = [datetime.fromtimestamp(i[0]) for i in self.rows]
        y_aqi_values = [int(i[1]) for i in self.rows]
        df = pd.DataFrame({'timestamp': x_timeline, 'now_aqi': y_aqi_values})
        indexed = df.set_index('timestamp')
        indexed.sort_index(inplace=True)
        mean = indexed.resample('1d').mean().round()
        mean.reset_index(level=0, inplace=True)
        # last year
        x_timeline = [datetime.fromtimestamp(i[0]) for i in self.y_rows]
        y_aqi_values = [int(i[1]) for i in self.y_rows]
        df = pd.DataFrame({'timestamp': x_timeline, 'year_aqi': y_aqi_values})
        indexed = df.set_index('timestamp')
        indexed.sort_index(inplace=True)
        year_mean = indexed.resample('1d').mean().round()
        year_mean.reset_index(level=0, inplace=True)
        # merge positionally on the shared day offset
        mean['year_aqi'] = year_mean['year_aqi']
        mean.sort_values(by='timestamp', inplace=True)
        mean['timestamp'] = mean['timestamp'].dt.strftime('%d %b')
        # classify change on a 15% threshold
        mean['diff'] = (mean['now_aqi'] - mean['year_aqi']) / mean['now_aqi']
        mean['change'] = np.where(
            mean['diff'].abs() < 0.15, 'same', mean['diff']
        )
        mean['change'] = np.where(
            mean['diff'] <= -0.15, 'down', mean['change']
        )
        mean['change'] = np.where(mean['diff'] >= 0.15, 'up', mean['change'])
        return {
            'x': mean['timestamp'],
            'y_1': mean['now_aqi'].astype('int'),
            'y_2': mean['year_aqi'].astype('int'),
            'change': mean['change']
        }

    def write_table(self):
        """ Write the year comparison table json with an avg header row. """
        avg = int(self.axis['y_1'].mean())
        y_avg = int(self.axis['y_2'].mean())
        diff_avg = (avg - y_avg) / avg
        if diff_avg <= -0.15:
            avg_change = 'down'
        elif diff_avg >= 0.15:
            avg_change = 'up'
        else:
            avg_change = 'same'
        avg_row = ('avg 7 days', avg, y_avg, avg_change)
        data_rows = list(zip(
            self.axis['x'], self.axis['y_1'],
            self.axis['y_2'], self.axis['change']
        ))
        data_rows.insert(0, avg_row)
        with open('static/dyn/year-table.json', 'w', encoding='utf-8') as f:
            f.write(json.dumps({"data": data_rows}))

    def write_plt(self):
        """ Write the side-by-side year comparison bar chart. """
        x = self.axis['x']
        y_1 = self.axis['y_1']
        y_2 = self.axis['y_2']
        col_y_1 = NightlyPlots.color_colums(y_1)
        col_y_2 = NightlyPlots.color_colums(y_2)
        # pd.concat replaces Series.append (removed in pandas 2)
        y_max = int(np.ceil(pd.concat([y_1, y_2]).max() / 50) * 50 + 50)
        x_indexes = np.arange(len(x))
        width = 0.25
        plt_title = 'Daily avg AQI values compared to last year'
        plt_suptitle = 'left: this year, right: last year'
        plt.style.use('seaborn')
        plt.bar(
            x_indexes - (width / 2) - 0.02, y_1, color=col_y_1, width=width
        )
        plt.bar(
            x_indexes + (width / 2) + 0.02, y_2, color=col_y_2, width=width
        )
        plt.title(plt_suptitle, fontsize=15)
        plt.suptitle(plt_title, fontsize=20, y=0.96)
        plt.yticks(np.arange(0, y_max, step=50))
        plt.xticks(ticks=x_indexes, labels=x)
        plt.tight_layout()
        plt.savefig('static/dyn/year-graph.png', dpi=300)
        plt.figure()
        plt.close('all')


def main():
    """ Run the full collection of nightly exports. """
    nightly = NightlyPlots()
    nightly.recreate_last_7()
    nightly.recreate_last_3()
    nightly.recreate_pm_chart()
    nightly.recreate_hour_bar()
    nightly.recreate_year_comparison()
# web/src/helper.py -- shared helper functions and classes.
# Module-level imports at the top of the file: json,
# flask_table.create_table / Col.


def get_config():
    """ Read and parse config.json from the working directory. """
    # json.load replaces the old read() + json.loads round trip
    with open('config.json', 'r', encoding='utf-8') as config_file:
        return json.load(config_file)


def plt_fill(plt, x, y):
    """ Fill the area under an AQI curve with the category colors. """
    plt.fill_between(
        x, y, y2=0, where=(y > 0), color='#85a762', interpolate=True
    )  # good
    plt.fill_between(
        x, y, y2=50, where=(y > 50), color='#d4b93c', interpolate=True
    )  # moderate
    plt.fill_between(
        x, y, y2=100, where=(y > 100), color='#e96843', interpolate=True
    )  # ufsg
    plt.fill_between(
        x, y, y2=150, where=(y > 150), color='#d03f3b', interpolate=True
    )  # unhealthy
    plt.fill_between(
        x, y, y2=200, where=(y > 200), color='#be4173', interpolate=True
    )  # vunhealthy
    plt.fill_between(
        x, y, y2=300, where=(y > 300), color='#714261', interpolate=True
    )  # hazardous
    plt.fill_between(
        x, y, y2=0, where=(y > 0), color='#ffffff', alpha=0.1, interpolate=True
    )  # soften all colors


class Table:
    """ Build an html comparison table from a json file for templates. """

    COLUMNS = [' ', 'this year', 'last year', 'change']

    def __init__(self, filename):
        self.filename = filename
        self.rows = self.get_rows()

    def get_rows(self):
        """ Read the json file and zip each data row onto COLUMNS. """
        with open(self.filename, 'r', encoding='utf-8') as json_file:
            table_json = json.load(json_file)

        return [dict(zip(self.COLUMNS, row)) for row in table_json['data']]

    def create_table(self):
        """ Build the flask_table object from rows and columns. """
        blank_table = create_table(options={'classes': ['comp-table']})

        for column in self.COLUMNS:
            blank_table.add_column(column, Col(column))

        return blank_table(self.rows)
padding: 10px; +} + +.content { + width: 70%; + max-width: 900px; + margin-left: auto; + margin-right: auto; + padding: 20px 0; +} + +.content-subtitle { + max-width: 900px; + margin-left: auto; + margin-right: auto; + padding-top: 20px; +} + +.col-box { + width: 70%; + min-height: 20px; + max-width: 900px; + margin-left: auto; + margin-right: auto; + color: #fff; +} + +.preload { + position: fixed; + top: 0; + width: 100%; + height: 100vh; + background: #e6e6e6; + display: flex; + justify-content: center; + align-items: center; + z-index: 1; +} + +.preload img { + width: 100px; +} + +.preload-finish { + opacity: 0; + pointer-events: none; +} + +.good { + background-color: #85a762; +} +.moderate { + background-color: #d4b93c; +} +.ufsg { + background-color: #e96843; +} +.unhealthy { + background-color: #d03f3b; +} +.vunhealthy { + background-color: #be4173; +} +.hazardous { + background-color: #714261; +} + +/* title start */ +.title { + display: flex; + justify-content: space-between; + align-items: center; +} + +.title-nav ul { + list-style-type:none; +} + +.title-nav li { + padding: 5px 40px; + margin: 5px; + border-style: none none solid none; + border-width: 2px; +} + +.title-nav a { + text-decoration: none; +} +/* title end */ + +/* cloud start */ +.cloud { + position: relative; + padding-bottom: 30px; +} + +.cloud img { + width: 100%; +} + +.aqi_box { + position: absolute; + top: 45%; + left: 57%; + text-align: right; + transform: translate(-50%, -50%); + color: #fff; +} + +.aqi_box h1 { + font-size: 15em; + margin: 0; +} + +.aqi_box h2 { + font-family: Rubik-Light; + font-size: 1.8em; + margin: 0; +} + +.aqi_box p { + margin: 0; +} +/* cloud end */ + +/* weather start */ +.weather { + display: flex; + justify-content: space-between; + align-items: center; +} + +.weather_box { + border: solid 2px; + border-radius: 20px; + display: flex; + align-items: center; + justify-content: flex-start; + padding: 10px 0; + width: 24%; +} + +.weather_icon img { + width: 
50px; + margin: 10px; +} +/* weather end */ +/* category start */ +.category { + display: flex; + align-items: center; + justify-content: space-between; +} + +.cat-icon img { + width: 100%; + max-width: 200px; +} +.cat-item { + color: #fff; + margin: 10px; + padding: 15px; + text-align: center; +} +.cat-item.active { + transform: scale(1.2); + font-size: 1.1em; + margin: 15px; + box-shadow: darkgray 5px 5px; + font-weight: bold; +} +/* category end */ + +/* last graph start */ +.graph { + display: flex; + align-items: center; + justify-content: initial; +} + +.graph-item { + box-sizing: border-box; + width: 100%; +} + +.graph-item img { + width: 100%; +} +/* last graph end */ + +/* table start */ +.comp-table { + width: 100%; + table-layout: fixed; + margin: auto; +} + +.comp-table thead th { + padding: 5px 0; + background-color: #eeeeee; +} + +.comp-table tbody td:nth-child(1) { + padding: 3px 0 3px 10px; + background-color: #eeeeee; +} + +.comp-table td:nth-child(2), +.comp-table td:nth-child(3), +.comp-table td:nth-child(4) { + text-align: center; + color: #fff; +} +/* table end */ + +/* about start */ +.category-table th { + text-align: left; + padding: 0 10px; + min-width: 150px; +} + +.category-table td { + padding: 10px; + min-width: 150px; +} + +.category-table td:nth-child(1) { + color: #fff; +} + +.category-table tr { + display: block; + margin: 10px 0; +} +/* about end */ + +/* responsiv start */ +@media screen and (max-width: 1000px) { + .content { + width: 90%; + } +} + +@media screen and (max-width: 600px) { + h1 { + font-size: 2.5em; + } + .content { + width: 100%; + } + .title { + flex-direction: column-reverse; + width: 100%; + padding-top: 0; + } + .title-nav li { + float: left; + padding: 10px; + } + #aqiValue { + font-size: 6em; + } + .aqi_box { + top: 38%; + left: 50%; + width: 60%; + } + .weather { + flex-wrap: wrap; + } + .weather_box { + width: 47%; + } + .weather_box:nth-child(1), + .weather_box:nth-child(2) { + margin-bottom: 10px; + } + 
.category { + display: block; + text-align: center; + } + .cat-item { + width: 70%; + margin-left: auto; + margin-right: auto; + } + .cat-item.active { + margin: 20px auto; + } + .cat-icon img { + max-width: unset; + width: 70%; + margin: 20px 0; + } + .graph { + display: block; + padding: 0; + } + .graph-item { + margin: 20px 0; + } + .category-table td { + min-width: 80px; + } +} +/* responsiv end */ \ No newline at end of file diff --git a/frontend/nginx/favicon.ico b/web/static/favicon.ico similarity index 100% rename from frontend/nginx/favicon.ico rename to web/static/favicon.ico diff --git a/frontend/nginx/img/cloud.png b/web/static/img/cloud.png similarity index 100% rename from frontend/nginx/img/cloud.png rename to web/static/img/cloud.png diff --git a/frontend/nginx/img/cloud_colors.gif b/web/static/img/cloud_colors.gif similarity index 100% rename from frontend/nginx/img/cloud_colors.gif rename to web/static/img/cloud_colors.gif diff --git a/frontend/nginx/img/icon/000.png b/web/static/img/icon/000.png similarity index 100% rename from frontend/nginx/img/icon/000.png rename to web/static/img/icon/000.png diff --git a/frontend/nginx/img/icon/01d.png b/web/static/img/icon/01d.png similarity index 100% rename from frontend/nginx/img/icon/01d.png rename to web/static/img/icon/01d.png diff --git a/frontend/nginx/img/icon/01n.png b/web/static/img/icon/01n.png similarity index 100% rename from frontend/nginx/img/icon/01n.png rename to web/static/img/icon/01n.png diff --git a/frontend/nginx/img/icon/02d.png b/web/static/img/icon/02d.png similarity index 100% rename from frontend/nginx/img/icon/02d.png rename to web/static/img/icon/02d.png diff --git a/frontend/nginx/img/icon/02n.png b/web/static/img/icon/02n.png similarity index 100% rename from frontend/nginx/img/icon/02n.png rename to web/static/img/icon/02n.png diff --git a/frontend/nginx/img/icon/03d.png b/web/static/img/icon/03d.png similarity index 100% rename from frontend/nginx/img/icon/03d.png rename 
to web/static/img/icon/03d.png diff --git a/frontend/nginx/img/icon/03n.png b/web/static/img/icon/03n.png similarity index 100% rename from frontend/nginx/img/icon/03n.png rename to web/static/img/icon/03n.png diff --git a/frontend/nginx/img/icon/04d.png b/web/static/img/icon/04d.png similarity index 100% rename from frontend/nginx/img/icon/04d.png rename to web/static/img/icon/04d.png diff --git a/frontend/nginx/img/icon/04n.png b/web/static/img/icon/04n.png similarity index 100% rename from frontend/nginx/img/icon/04n.png rename to web/static/img/icon/04n.png diff --git a/frontend/nginx/img/icon/09d.png b/web/static/img/icon/09d.png similarity index 100% rename from frontend/nginx/img/icon/09d.png rename to web/static/img/icon/09d.png diff --git a/frontend/nginx/img/icon/09n.png b/web/static/img/icon/09n.png similarity index 100% rename from frontend/nginx/img/icon/09n.png rename to web/static/img/icon/09n.png diff --git a/frontend/nginx/img/icon/10d.png b/web/static/img/icon/10d.png similarity index 100% rename from frontend/nginx/img/icon/10d.png rename to web/static/img/icon/10d.png diff --git a/frontend/nginx/img/icon/10n.png b/web/static/img/icon/10n.png similarity index 100% rename from frontend/nginx/img/icon/10n.png rename to web/static/img/icon/10n.png diff --git a/frontend/nginx/img/icon/11d.png b/web/static/img/icon/11d.png similarity index 100% rename from frontend/nginx/img/icon/11d.png rename to web/static/img/icon/11d.png diff --git a/frontend/nginx/img/icon/11n.png b/web/static/img/icon/11n.png similarity index 100% rename from frontend/nginx/img/icon/11n.png rename to web/static/img/icon/11n.png diff --git a/frontend/nginx/img/icon/13d.png b/web/static/img/icon/13d.png similarity index 100% rename from frontend/nginx/img/icon/13d.png rename to web/static/img/icon/13d.png diff --git a/frontend/nginx/img/icon/13n.png b/web/static/img/icon/13n.png similarity index 100% rename from frontend/nginx/img/icon/13n.png rename to web/static/img/icon/13n.png 
diff --git a/frontend/nginx/img/icon/50d.png b/web/static/img/icon/50d.png similarity index 100% rename from frontend/nginx/img/icon/50d.png rename to web/static/img/icon/50d.png diff --git a/frontend/nginx/img/icon/50n.png b/web/static/img/icon/50n.png similarity index 100% rename from frontend/nginx/img/icon/50n.png rename to web/static/img/icon/50n.png diff --git a/frontend/nginx/img/icon/category-good.png b/web/static/img/icon/category-good.png similarity index 100% rename from frontend/nginx/img/icon/category-good.png rename to web/static/img/icon/category-good.png diff --git a/frontend/nginx/img/icon/category-hazardous.png b/web/static/img/icon/category-hazardous.png similarity index 100% rename from frontend/nginx/img/icon/category-hazardous.png rename to web/static/img/icon/category-hazardous.png diff --git a/frontend/nginx/img/icon/category-moderate.png b/web/static/img/icon/category-moderate.png similarity index 100% rename from frontend/nginx/img/icon/category-moderate.png rename to web/static/img/icon/category-moderate.png diff --git a/frontend/nginx/img/icon/category-plain.png b/web/static/img/icon/category-plain.png similarity index 100% rename from frontend/nginx/img/icon/category-plain.png rename to web/static/img/icon/category-plain.png diff --git a/frontend/nginx/img/icon/category-unhealthy.png b/web/static/img/icon/category-unhealthy.png similarity index 100% rename from frontend/nginx/img/icon/category-unhealthy.png rename to web/static/img/icon/category-unhealthy.png diff --git a/frontend/nginx/img/icon/category-unhealthyforsensitivegroups.png b/web/static/img/icon/category-unhealthyforsensitivegroups.png similarity index 100% rename from frontend/nginx/img/icon/category-unhealthyforsensitivegroups.png rename to web/static/img/icon/category-unhealthyforsensitivegroups.png diff --git a/frontend/nginx/img/icon/category-veryunhealthy.png b/web/static/img/icon/category-veryunhealthy.png similarity index 100% rename from 
frontend/nginx/img/icon/category-veryunhealthy.png rename to web/static/img/icon/category-veryunhealthy.png diff --git a/frontend/nginx/img/icon/humidity.png b/web/static/img/icon/humidity.png similarity index 100% rename from frontend/nginx/img/icon/humidity.png rename to web/static/img/icon/humidity.png diff --git a/frontend/nginx/img/icon/pressure.png b/web/static/img/icon/pressure.png similarity index 100% rename from frontend/nginx/img/icon/pressure.png rename to web/static/img/icon/pressure.png diff --git a/frontend/nginx/img/icon/wind.png b/web/static/img/icon/wind.png similarity index 100% rename from frontend/nginx/img/icon/wind.png rename to web/static/img/icon/wind.png diff --git a/frontend/nginx/img/lightbox/close.png b/web/static/img/lightbox/close.png similarity index 100% rename from frontend/nginx/img/lightbox/close.png rename to web/static/img/lightbox/close.png diff --git a/frontend/nginx/img/lightbox/loading.gif b/web/static/img/lightbox/loading.gif similarity index 100% rename from frontend/nginx/img/lightbox/loading.gif rename to web/static/img/lightbox/loading.gif diff --git a/frontend/nginx/img/lightbox/next.png b/web/static/img/lightbox/next.png similarity index 100% rename from frontend/nginx/img/lightbox/next.png rename to web/static/img/lightbox/next.png diff --git a/frontend/nginx/img/lightbox/prev.png b/web/static/img/lightbox/prev.png similarity index 100% rename from frontend/nginx/img/lightbox/prev.png rename to web/static/img/lightbox/prev.png diff --git a/frontend/nginx/img/social_preview.jpg b/web/static/img/social_preview.jpg similarity index 100% rename from frontend/nginx/img/social_preview.jpg rename to web/static/img/social_preview.jpg diff --git a/frontend/nginx/js/aqi.js b/web/static/js/aqi.js similarity index 94% rename from frontend/nginx/js/aqi.js rename to web/static/js/aqi.js index 8b6bd34..8698e45 100644 --- a/frontend/nginx/js/aqi.js +++ b/web/static/js/aqi.js @@ -64,9 +64,9 @@ function rmPreload() { // scrollbar 
document.querySelector('body').style.overflow = 'unset' // sticky - const topBar = document.querySelector('.colorbox'); - topBar.style.position = 'sticky'; - topBar.style.position = '-webkit-sticky'; + // const topBar = document.querySelector('.colorbox'); + // topBar.style.position = 'sticky'; + // topBar.style.position = '-webkit-sticky'; } // reload current.png from remote @@ -88,7 +88,7 @@ function refreshAqiValues() { return new Promise((resolve, reject) => { var req = new XMLHttpRequest(); req.responseType = 'json'; - req.open('GET', '/dyn/air.json', true); + req.open('GET', '/data/out', true); req.setRequestHeader('cache-control', 'no-cache'); req.onload = function() { var responseAqi = req.response; @@ -149,7 +149,7 @@ function setAqiColors(aqiCategory) { }; } // apply light background change - var lightBg = document.getElementsByClassName('light_background'); + var lightBg = document.getElementsByClassName('col_lightbg'); if (lightBg) { for (var i = 0; i < lightBg.length; i++) { lightBg[i].style.backgroundColor = colSecond; @@ -170,7 +170,7 @@ function setAqiColors(aqiCategory) { }; } // apply hover color - var css = '.nav li:hover {background-color: ' + colMain + ';}'; + var css = '.title-nav li:hover {background-color: ' + colMain + ';}'; var style = document.createElement('style'); style.appendChild(document.createTextNode(css)); document.getElementsByTagName('head')[0].appendChild(style); @@ -185,7 +185,7 @@ function setWeatherDetails(responseAqi) { var humidity = Math.round(responseAqi['humidity']); var pressure = Math.round(responseAqi['pressure']); // weather icon - weatherIconSrc = '/img/icon/' + weatherIcon + '.png'; + weatherIconSrc = '/static/img/icon/' + weatherIcon + '.png'; document.getElementById('weather_icon').src = weatherIconSrc; // weather name document.getElementById('weather_name').innerHTML = weatherName; @@ -203,7 +203,7 @@ function setDesc(responseAqi) { // parse response var aqiCategory = responseAqi['aqi_category']; var 
aqiCatClean = aqiCategory.toLowerCase().replaceAll(' ', ''); - var iconSrc = '/img/icon/category-' + aqiCatClean + ".png"; + var iconSrc = '/static/img/icon/category-' + aqiCatClean + ".png"; // parse config var aqiRange = colorConfig[aqiCategory][3]; var aqiDesc = colorConfig[aqiCategory][4]; @@ -220,7 +220,7 @@ function setDesc(responseAqi) { // figure out which to activate var allCategories = Object.keys(colorConfig); var indexMatch = allCategories.indexOf(aqiCategory); - var activeCat = document.getElementsByClassName('desc_item')[indexMatch]; + var activeCat = document.getElementsByClassName('cat-item')[indexMatch]; // activate activeCat.classList.add("active"); } diff --git a/frontend/nginx/js/lightbox.js b/web/static/js/lightbox.js similarity index 100% rename from frontend/nginx/js/lightbox.js rename to web/static/js/lightbox.js diff --git a/web/templates/about.html b/web/templates/about.html new file mode 100644 index 0000000..6f975ad --- /dev/null +++ b/web/templates/about.html @@ -0,0 +1,55 @@ +{% extends "layout.html" %} +{% block content %} +
+
+

About

+

This page and its contents are still under construction. More content is coming soon.

+

The data for this page is collected from an air monitor located just outside of Luang Prabang, Laos. While we do our best, no guarantee is given for the accuracy of this data.

+

The data is updated every 3 minutes. Unlike international websites that measure air pollution via satellite images and rely on estimates and averages, an on-site air monitor delivers real-time values that paint a much more accurate picture of the local situation.

+

Broadly speaking, the Air Quality Index (AQI) is the internationally used standard for measuring air pollution. It is divided into 6 levels, and according to these levels, specific health advice is given:

+
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
AQI ValuesDescription
0 to 50:Good: No health concerns, enjoy activities.
51 - 100:Moderate: Unusually sensitive people should consider limiting prolonged outdoor exertion.
101 - 150:Unhealthy for Sensitive Groups: Active children and adults, and people with respiratory disease, such as asthma, should limit prolonged outdoor exertion.
151 - 200:Unhealthy: Everyone may begin to experience health effects: Active children and adults, and people with respiratory disease, such as asthma, should avoid prolonged outdoor exertion; everyone else, especially children, should limit prolonged outdoor exertion.
201 - 300:Very Unhealthy: Active children and adults, and people with respiratory disease, such as asthma, should avoid all outdoor exertion; everyone else, especially children, should limit outdoor exertion.
301 - 500:Hazardous: Everyone should avoid all outdoor exertion.
+
+
+
+
+
+

Credits

+

Partial weather data, namely the weather icon, weather description and wind speed, is provided by the openweathermap.org API, distributed under the Creative Commons Attribution-ShareAlike 4.0 International License.

+

Lightbox made by Lokesh Dhakar, released under the MIT license.

+
+
+{% endblock content %} \ No newline at end of file diff --git a/web/templates/graphs.html b/web/templates/graphs.html new file mode 100644 index 0000000..5d035a0 --- /dev/null +++ b/web/templates/graphs.html @@ -0,0 +1,103 @@ +{% extends "layout.html" %} +{% block content %} +
+
+

Graphs

+

All the graphs and tables on this page are recreated every night with the newest values.

+
+
+

Last three days

+
+
+
+

Three days ago

+ + Three days ago graph + +
+
+

Two days ago

+ + Two days ago graph + +
+
+

Yesterday

+ + Yesterday graph + +
+
+
+
+
+
+

Particle Matter sizes

+

There is no healthy level of pollution. Particle matter (PM) is defined in two different sizes: PM 2.5, which represents particle sizes smaller than 2.5 µm or less than 1/20th of the diameter of a human hair, and PM 10, which represents particle sizes smaller than 10 µm or 1/5th of the diameter of a human hair.

+

The WHO is providing more details on their website regarding particle matter and their health implications. On Wikipedia there are some interesting links to studies for further reading.

+
+
+
+ + pm 2.5 value graph + +
+
+

PM 2.5

+

Particle matter sizes smaller than 2.5 µm are the most problematic, as these particles find their way through the lungs into the bloodstream.

+

The WHO air quality guideline values set a 25 µg/m³ 24-hour average as an upper threshold. In the 10-day overview you can see:

+

Green: Daily average exposure below 25 µg/m³
+ Red: Daily average exposure above 25 µg/m³

+
+
+
+
+ + pm 10 value graph + +
+
+

PM 10

+

The threshold for the daily average PM 10 exposure is set to 50 µg/m³ by the WHO. Particles this size can penetrate and lodge deep inside the lungs but are too big to enter the bloodstream. For this reason, the threshold is higher.

+

In the 10-day overview you can see:

+

Green: Daily average exposure below 50 µg/m³
+ Red: Daily average exposure above 50 µg/m³

+
+
+
+
+
+
+

Hour by Hour

+
+
+
+ + hour by hour graph + +
+
+

Hourly AQI average

+

The AQI value can change a lot during the day. This can depend on the wind, cooking on fire or just another additional source of pollution nearby.

+

In this chart you can see the average AQI for each hour. This data is based on the last three days. This data can help to visualize which hours in the past three days have been better or worse on average.

+
+
+
+
+
+
+

Compared to last year

+

This year's daily average AQI values from the last 10 days compared to the corresponding values from last year.

+
+
+
+ {{ table }} +
+
+ + year comparison graph + +
+
+
+{% endblock content %} \ No newline at end of file diff --git a/web/templates/home.html b/web/templates/home.html new file mode 100644 index 0000000..5a723cf --- /dev/null +++ b/web/templates/home.html @@ -0,0 +1,103 @@ +{% extends "layout.html" %} +{% block content %} +
+
+
+ cloud +
+

+

US AQI

+

+
+
+
+
+
+
+
+
+ weather_icon +
+
+

°C

+

+
+
+
+
+ wind_icon +
+
+

Wind

+

km/h

+
+
+
+
+ humidity_icon +
+
+

Humidity

+

%

+
+
+
+
+ pressure_icon +
+
+

Pressure

+

mbar

+
+
+
+
+
+
+
+
+

Good

+
+
+

Moderate

+
+
+

Unhealthy for Sensitive Groups

+
+
+

Unhealthy

+
+
+

Very Unhealthy

+
+
+

Hazardous

+
+
+
+ category_icon +
+
+

+

+

+
+
+
+
+
+
+

Last three hours

+ + current graph + +
+
+

Last 7 days

+ + last-7 graph + +
+
+
+{% endblock content %} \ No newline at end of file diff --git a/web/templates/layout.html b/web/templates/layout.html new file mode 100644 index 0000000..cc491a5 --- /dev/null +++ b/web/templates/layout.html @@ -0,0 +1,53 @@ + + + + + + {% if title %} + AQI - {{ title }} + {% else %} + AQI + {% endif %} + + + + + + + +
+ cloud_animation +
+
+
+ +
+
+
+
+
+

Live Air Quality

+

in Luang Prabang Laos PDR

+

Last updated:

+
+
+ +
+
+
+ {% block content %}{% endblock %} +
+
+

© 2021 | Documentation

+
+
+ + \ No newline at end of file diff --git a/web/templates/monthly.html b/web/templates/monthly.html new file mode 100644 index 0000000..4a061dd --- /dev/null +++ b/web/templates/monthly.html @@ -0,0 +1,22 @@ +{% extends "layout.html" %} +{% block content %} +
+
+

Month by month

+

Each month compared to the same month last year. Values are 8-hour averages.

+
+ {% for month in months %} +
+

{{month.month_name}}

+
+
+
+ {{month.month_name}} graph +
+
+ {{month.table}} +
+
+ {% endfor %} +
+{% endblock content %} \ No newline at end of file diff --git a/backend/flask/app.ini b/web/uwsgi.ini similarity index 74% rename from backend/flask/app.ini rename to web/uwsgi.ini index 4e98651..24eadfd 100644 --- a/backend/flask/app.ini +++ b/web/uwsgi.ini @@ -1,5 +1,5 @@ [uwsgi] -wsgi-file = run.py +wsgi-file = views.py callable = app socket = :8080 processes = 4 @@ -7,4 +7,4 @@ threads = 2 master = true chmod-socket = 660 vacuum = true -die-on-term = true +die-on-term = true \ No newline at end of file diff --git a/web/views.py b/web/views.py new file mode 100644 index 0000000..1b60def --- /dev/null +++ b/web/views.py @@ -0,0 +1,114 @@ +""" main entry page to handle all the routes """ + +import os +from datetime import datetime + +from flask import Flask, render_template, request, app +from flask import url_for # pylint: disable=unused-import +from flask_httpauth import HTTPBasicAuth +from apscheduler.schedulers.background import BackgroundScheduler + + +from src.helper import Table, get_config +from src.db import get_current, insert_data +from src.graph_current import main as current_graph +from src.graph_nightly import main as nightly_graph +from src.graph_monthly import main as monthly_graph + +import matplotlib +matplotlib.use('Agg') + +# start up +app = Flask(__name__) + +CONFIG = get_config() +auth = HTTPBasicAuth() +aqi_user = CONFIG['aqi_monitor'] +USER_DATA = { + aqi_user['authUsername']: aqi_user['authPassword'] +} + +# initial export +print('initial export') +current_graph() +nightly_graph() +monthly_graph() + +# start scheduler +scheduler = BackgroundScheduler() +scheduler.add_job( + current_graph, trigger="cron", minute='*/5', name='current_graph' +) +scheduler.add_job( + nightly_graph, trigger="cron", day='*', hour='1', minute='1', name='night' +) +scheduler.add_job( + nightly_graph, trigger="cron", day='*', hour='1', minute='2', name='month' +) +scheduler.start() + + +@auth.verify_password +def verify(username, password): + """ get password """ 
+ if not (username and password): + return False + return USER_DATA.get(username) == password + + +@app.route("/") +def home(): + """ home page """ + return render_template('home.html') + + +@app.route("/about") +def about(): + """ about page """ + return render_template('about.html', title='About') + + +@app.route("/graphs") +def graphs(): + """ graphs page """ + table = Table('static/dyn/year-table.json').create_table() + return render_template('graphs.html', title='Graphs', table=table) + + +@app.route("/monthly") +def monthly(): + """ monthly statistics page """ + months = [i for i in os.listdir('static/dyn/monthly') if '.json' in i] + months.sort(reverse=True) + + month_dicts = [] + for month in months: + month_clean = os.path.splitext(month)[0] + month_graph = os.path.join('static/dyn/monthly', month_clean + '.png') + month_name = datetime.strptime(month_clean, "%Y-%m").strftime('%B %Y') + month_json = os.path.join('static/dyn/monthly', month) + table = Table(month_json).create_table() + month_dict = { + 'month_graph': month_graph, + 'month_name': month_name, + 'table': table + } + month_dicts.append(month_dict) + + return render_template('monthly.html', title='Monthly', months=month_dicts) + + +@app.route("/data/in", methods=['POST']) +@auth.login_required +def ingest(): + """ handle post request from monitor """ + post_data = request.json + insert_data(post_data) + return 'ingest' + + +@app.route("/data/out") +def data(): + """ return data from db """ + json_data = get_current() + return json_data