major app rebuild into a single flask application
|
@ -2,7 +2,7 @@
|
|||
__pycache__
|
||||
|
||||
# protect real config files
|
||||
config
|
||||
config.json
|
||||
config.h
|
||||
|
||||
# env files
|
||||
|
@ -12,6 +12,4 @@ postgres.env
|
|||
*.ttf
|
||||
|
||||
# dynamic files
|
||||
**/dyn/*.json
|
||||
**/dyn/
|
||||
update.html
|
||||
|
|
|
@ -1,2 +0,0 @@
|
|||
__pycache__
|
||||
/dyn/*
|
|
@ -1,16 +0,0 @@
|
|||
# Base image: official Python image (no tag pinned, so this pulls :latest)
FROM python

# Set the working directory to /app
WORKDIR /app

# dynamic output (graphs, json) lives on a mounted volume
VOLUME /app/dyn

# Copy the current directory contents into the container at /app
ADD . /app

# Install the dependencies
RUN pip install -r requirements.txt

# run the command to start uWSGI
CMD ["uwsgi", "app.ini"]
|
|
@ -1,5 +0,0 @@
|
|||
"""Flask application package: create the app object, then register views."""
from flask import Flask

app = Flask(__name__)

# imported at the bottom on purpose: the views module imports `app` from
# this package, so importing it any earlier would be a circular import
from app import views
|
|
@ -1,88 +0,0 @@
|
|||
import json
|
||||
from datetime import datetime
|
||||
import numpy as np
|
||||
|
||||
|
||||
def input_process(data):
    """Combine an incoming AQI reading with the cached weather data.

    data: dict holding at least a 'pm25' key (raw PM2.5 reading).
    Returns (combined_dict, error_found) where error_found is True when
    the reading looks invalid (pm25 == 0).
    """
    error_found = False
    # read the last cached weather observation; drop its own timestamps
    # so they don't clobber this reading's timestamps set below
    try:
        with open('dyn/weather.json', 'r') as f:
            weather_data = f.read()
        weather_data_json = json.loads(weather_data)
        del weather_data_json['timestamp']
        del weather_data_json['epoch_time']
    except FileNotFoundError:
        # will get recreated on next run
        weather_data_json = {}
    # parse aqi data
    json_dict = data
    pm25 = json_dict['pm25']
    aqi, aqi_category = get_AQI(pm25)
    json_dict['aqi_value'] = float(aqi)
    json_dict['aqi_category'] = aqi_category
    if pm25 == 0:
        # a zero reading means the sensor failed to report
        error_found = True
    # set timestamp; timestamp() replaces strftime('%s'), which is a
    # non-portable glibc extension
    now = datetime.now()
    json_dict['timestamp'] = now.strftime("%Y-%m-%d %H:%M:%S")
    json_dict['epoch_time'] = int(now.timestamp())
    # combine the two and return
    json_dict.update(weather_data_json)
    return json_dict, error_found
|
||||
|
||||
|
||||
def get_AQI(pm25):
    """Convert a PM2.5 concentration to (AQI value, AQI category).

    Linear interpolation inside each breakpoint band; concentrations
    above 500.4 are passed through unchanged and flagged 'Hazardous'.
    Returns the AQI rounded to the nearest integer.

    Fixes: the original truncated with int() *before* rounding (making
    the rounding a no-op) and used np.round_, which NumPy 2.0 removed.
    """
    # (conc. low, conc. high, AQI low, AQI high, category)
    breakpoints = [
        (0.0, 12.0, 0, 50, "Good"),
        (12.0, 35.4, 50, 100, "Moderate"),
        (35.4, 55.4, 100, 150, "Unhealthy for Sensitive Groups"),
        (55.4, 150.4, 150, 200, "Unhealthy"),
        (150.4, 199.9, 200, 250, "Very Unhealthy"),
        (199.9, 250.4, 250, 300, "Very Unhealthy"),
        (250.4, 299.9, 300, 350, "Hazardous"),
        (299.9, 350.4, 350, 400, "Hazardous"),
        (350.4, 424.6, 400, 450, "Hazardous"),
        (424.6, 500.4, 450, 500, "Hazardous"),
    ]
    aqi = None
    aqi_category = "Hazardous"
    for c_low, c_high, a_low, a_high, category in breakpoints:
        if pm25 <= c_high:
            perc = (pm25 - c_low) / (c_high - c_low)
            aqi = (a_high - a_low) * perc + a_low
            aqi_category = category
            break
    if aqi is None:
        # beyond the scale: report the raw concentration
        aqi = pm25
    aqi = int(round(aqi))
    return aqi, aqi_category
|
|
@ -1,71 +0,0 @@
|
|||
""" handles insert into postgres db """
|
||||
|
||||
import psycopg2
|
||||
|
||||
|
||||
def db_connect(config):
    """Open a postgres connection using the credentials in `config`.

    Returns a (connection, cursor) tuple; the caller owns both and
    should release them via db_close().
    """
    conn = psycopg2.connect(
        host=config['db_host'],
        database=config['db_database'],
        user=config['db_user'],
        password=config['db_password'],
    )
    # one cursor per connection is enough for this app's usage
    cur = conn.cursor()
    return conn, cur
|
||||
|
||||
|
||||
def db_close(conn, cur):
    """Cleanly shut down a db session: commit, then release cursor and connection."""
    # commit first so nothing pending is lost when the connection drops
    conn.commit()
    cur.close()
    conn.close()
|
||||
|
||||
|
||||
def db_insert(config, json_dict):
    """Insert one combined reading into the aqi and weather tables.

    json_dict: dict holding every column produced by input_process().
    Returns the human readable timestamp of the inserted row.
    """
    # dict keys, in the order the SQL column lists expect them
    aqi_keys = (
        'epoch_time', 'sensor_id', 'timestamp', 'uptime',
        'pm25', 'pm10', 'aqi_value', 'aqi_category',
    )
    weather_keys = (
        'epoch_time', 'sensor_id', 'timestamp', 'temperature',
        'pressure', 'humidity', 'wind_speed', 'wind_direction',
        'weather_name', 'weather_icon',
    )
    aqi_values = tuple(json_dict[key] for key in aqi_keys)
    weather_values = tuple(json_dict[key] for key in weather_keys)
    # connect
    conn, cur = db_connect(config)
    # parameterized inserts, one row per table
    cur.execute("INSERT INTO aqi \
        (epoch_time, sensor_id, time_stamp, uptime, pm25, pm10, aqi_value, aqi_category) \
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)",
        aqi_values
    )
    cur.execute("INSERT INTO weather \
        (epoch_time, sensor_id, time_stamp, temperature, pressure, humidity, \
        wind_speed, wind_direction, weather_name, weather_icon) \
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
        weather_values
    )
    # close
    db_close(conn, cur)
    return json_dict['timestamp']
|
|
@ -1,223 +0,0 @@
|
|||
""" makes the nice plots """
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from matplotlib import pyplot as plt
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
import psycopg2
|
||||
|
||||
from app.db_connect import db_connect, db_close
|
||||
|
||||
|
||||
def create_current(config):
    """Rebuild the rolling three-hour 'current' AQI graph.

    Queries the last three hours of readings, resamples them into
    three minute buckets and writes dyn/current.png via write_plt().
    """
    now = datetime.now()
    now_human = now.strftime('%c')
    # portable epoch; strftime('%s') is a glibc-only extension
    now_epoch = int(now.timestamp())
    last_3h = now_epoch - 3 * 60 * 60
    # at most one row per minute over three hours
    last_3h_limit = int(60 * 3)
    # connect
    conn, cur = db_connect(config)
    # get data, newest first
    cur.execute(
        f'SELECT epoch_time, aqi_value FROM aqi \
        WHERE epoch_time > {last_3h} ORDER BY epoch_time DESC \
        LIMIT {last_3h_limit};')
    rows = cur.fetchall()
    # close db
    db_close(conn, cur)
    # title spans oldest to newest sample
    time_from = datetime.fromtimestamp(rows[-1][0]).strftime('%H:%M')
    time_until = datetime.fromtimestamp(rows[0][0]).strftime('%H:%M')
    plt_title = f'AQI values last 3h: {time_from} - {time_until}'
    # resample into 3 minute buckets
    sample_rate = '3min'
    x, y = build_plt(rows, sample_rate, '%H:%M')
    # tick marks on every quarter hour
    x_ticks = []
    for num, i in enumerate(x):
        minute = int(i.split(':')[1])
        if minute % 15 == 0:
            x_ticks.append(num)
    # write plt
    file_name = 'current'
    write_plt(x, y, plt_title, x_ticks, file_name)
    message = f'recreated current graph: {now_human}'
    print(message)
|
||||
|
||||
|
||||
def rebuild_3days(config):
    """Recreate the per-day graphs for the last three days.

    All three plots share one y axis limit so they stay visually
    comparable.
    """
    now = datetime.now()
    # one (x, y, title, ticks) tuple per day back
    x_1, y_1, plt_title_1, x_ticks_1 = get_axis(1, now, config)
    x_2, y_2, plt_title_2, x_ticks_2 = get_axis(2, now, config)
    x_3, y_3, plt_title_3, x_ticks_3 = get_axis(3, now, config)
    # shared y limit; pd.concat replaces Series.append, which was
    # removed in pandas 2.0
    y_max = max(pd.concat([y_1, y_2, y_3])) + 50
    # write plot
    write_plt(x_1, y_1, plt_title_1, x_ticks_1, 'day-1', y_max)
    write_plt(x_2, y_2, plt_title_2, x_ticks_2, 'day-2', y_max)
    write_plt(x_3, y_3, plt_title_3, x_ticks_3, 'day-3', y_max)
    print('recreated last three days plt')
|
||||
|
||||
|
||||
def get_axis(day, now, config):
    """Fetch and resample AQI data for the day `day` days before `now`.

    Returns (x, y, plt_title, x_ticks) ready for write_plt().
    """
    day_delta = now.date() - timedelta(days=day)
    # midnight of that day as epoch; datetime.timestamp() is portable,
    # unlike strftime('%s') which only works on glibc platforms
    day_from = int(datetime.combine(day_delta, datetime.min.time()).timestamp())
    day_until = day_from + 60 * 60 * 24
    # make the SELECT: one day of rows, newest first
    conn, cur = db_connect(config)
    cur.execute(
        f'SELECT epoch_time, aqi_value FROM aqi \
        WHERE epoch_time > {day_from} \
        AND epoch_time < {day_until} \
        ORDER BY epoch_time DESC LIMIT 720;'
    )
    rows = cur.fetchall()
    db_close(conn, cur)
    # title
    time_stamp = day_delta.strftime('%Y-%m-%d')
    plt_title = f'AQI values from: {time_stamp}'
    # 15 minute buckets -> 96 points per day; one tick every 2 hours
    x_ticks = np.arange(0, 97, step=8)
    sample_rate = '15min'
    x, y = build_plt(rows, sample_rate, '%H:%M')
    return x, y, plt_title, x_ticks
|
||||
|
||||
|
||||
def rebuild_7days(config):
    """Recreate the last-7-days graph from the database."""
    # setup: midnight today as epoch; portable replacement for the
    # glibc-only strftime('%s')
    now = datetime.now()
    day_until = int(datetime.combine(now.date(), datetime.min.time()).timestamp())
    day_from = day_until - 7 * 24 * 60 * 60
    # get data
    conn, cur = db_connect(config)
    cur.execute(
        f'SELECT epoch_time, aqi_value FROM aqi \
        WHERE epoch_time > {day_from} \
        AND epoch_time < {day_until} \
        ORDER BY epoch_time DESC LIMIT 30 * 24 * 7;'
    )
    rows = cur.fetchall()
    db_close(conn, cur)
    # title spans oldest to newest row
    date_from = datetime.fromtimestamp(rows[-1][0]).strftime('%d %b')
    date_until = datetime.fromtimestamp(rows[0][0]).strftime('%d %b')
    plt_title = f'AQI values from: {date_from} until {date_until}'
    # build axis of plot
    x, y_1, y_2 = build_last7_plt(rows)
    # one tick per day (12 two-hour buckets per day)
    x_range = np.arange(0, 84, step=12)
    x_date_time = pd.to_datetime(x).dt.date.unique()
    x_dates = np.asarray([i.strftime('%d %b') for i in x_date_time])
    x_ticks = x_range, x_dates
    # write the plot
    write_last7_plt(x, y_1, y_2, x_ticks, plt_title)
    print('recreated last-7 days graph')
|
||||
|
||||
|
||||
def build_plt(rows, sample_rate, time_format):
|
||||
""" parse rows returns axis"""
|
||||
# build x y
|
||||
x_timeline = [datetime.fromtimestamp(i[0]) for i in rows]
|
||||
y_aqi_values = [int(i[1]) for i in rows]
|
||||
# build dataframe
|
||||
data = {'timestamp': x_timeline, 'aqi': y_aqi_values}
|
||||
df = pd.DataFrame(data)
|
||||
# reindex as timeseries
|
||||
indexed = df.set_index('timestamp')
|
||||
indexed.sort_values(by=['timestamp'], inplace=True)
|
||||
mean = indexed.resample(sample_rate).mean()
|
||||
mean.interpolate(method='linear', limit=1, inplace=True, limit_area='inside')
|
||||
mean.reset_index(level=0, inplace=True)
|
||||
mean['timestamp'] = mean['timestamp'].dt.strftime(time_format)
|
||||
mean['aqi'] = mean['aqi'].round()
|
||||
# set axis
|
||||
x = mean['timestamp']
|
||||
y = mean['aqi']
|
||||
return x, y
|
||||
|
||||
|
||||
def build_last7_plt(rows):
    """Build the axes for the last-7-days plot.

    rows: list of (epoch_time, aqi_value) tuples.
    Returns (x, y_1, y_2): formatted timestamps, 2-hour AQI means, and a
    smoothed daily-average curve.
    """
    sample_rate = '2h'
    # build x y
    x_timeline = [datetime.fromtimestamp(i[0]) for i in rows]
    y_aqi_values = [int(i[1]) for i in rows]
    # build dataframe
    data = {'timestamp': x_timeline, 'aqi': y_aqi_values}
    df = pd.DataFrame(data)
    indexed = df.set_index('timestamp')
    indexed.sort_values(by=['timestamp'], inplace=True)
    mean = indexed.resample(sample_rate).mean()
    # daily mean lands on the midnight bucket of each day...
    mean['avg'] = mean['aqi'].resample('1d').mean()
    # ...then shift it 6 two-hour buckets (12 h) forward — presumably to
    # centre the daily value mid-day before interpolating
    mean['avg'] = mean.avg.shift(6)

    # anchor both endpoints so the interpolation covers the full range.
    # NOTE(review): chained assignment (mean['avg'][0] = ...) writes
    # through a view; newer pandas (copy-on-write) may silently ignore
    # this — confirm before upgrading pandas
    mean['avg'][0] = (mean['avg'].iloc[6] + mean['aqi'][0]) / 2
    mean['avg'][-1] = (mean['avg'].iloc[-6] + mean['aqi'][-1]) / 2

    # smooth the sparse daily points into a curve
    mean['avg'].interpolate(method='polynomial', order=3, inplace=True)
    mean.reset_index(level=0, inplace=True)
    mean['timestamp'] = mean['timestamp'].dt.strftime('%Y-%m-%d %H:%M')
    mean['aqi'] = mean['aqi'].round()
    mean['avg'] = mean['avg'].round()
    x = mean['timestamp']
    y_1 = mean['aqi']
    y_2 = mean['avg']
    return x, y_1, y_2
|
||||
|
||||
|
||||
def write_plt(x, y, plt_title, x_ticks, file_name, y_max=''):
    """Render one AQI line plot to dyn/<file_name>.png.

    x, y: axis series from build_plt().
    plt_title: plot headline.
    x_ticks: tick positions, or a (positions, labels) pair.
    file_name: output name without extension.
    y_max: optional shared y axis limit; '' means derive it from y.
    """
    # derive the y limit: next multiple of 50 above the data, plus headroom
    if not y_max:
        y_max = np.ceil(y.max()/50)*50 + 50
    # setup plot
    # NOTE(review): the 'seaborn' style name was removed in matplotlib
    # >= 3.6 (now 'seaborn-v0_8') — confirm the pinned matplotlib version
    plt.style.use('seaborn')
    plt.plot(x, y, color='#313131',)
    # stacked category bands: each fill only covers where y exceeds the
    # previous band's threshold
    plt.fill_between(x, y, y2=0, where=(y > 0), color='#85a762', interpolate=True) # good
    plt.fill_between(x, y, y2=50, where=(y > 50), color='#d4b93c', interpolate=True) # moderate
    plt.fill_between(x, y, y2=100, where=(y > 100), color='#e96843', interpolate=True) # ufsg
    plt.fill_between(x, y, y2=150, where=(y > 150), color='#d03f3b', interpolate=True) # unhealthy
    plt.fill_between(x, y, y2=200, where=(y > 200), color='#be4173', interpolate=True) # vunhealthy
    plt.fill_between(x, y, y2=300, where=(y > 300), color='#714261', interpolate=True) # hazardous
    # translucent white overlay softens all band colours
    plt.fill_between(x, y, y2=0, where=(y > 0), color='#ffffff', alpha=0.1, interpolate=True) # soft
    # ticks may come as one positions list or a (positions, labels) pair
    if len(x_ticks) == 2:
        plt.xticks(x_ticks[0], x_ticks[1])
    else:
        plt.xticks(x_ticks)
    plt.yticks(np.arange(0, y_max, step=50))
    plt.title(plt_title, fontsize=20)
    plt.tight_layout()
    plt.savefig(f'dyn/{file_name}.png', dpi = 300)
    # open a fresh figure and close everything so pyplot's global state
    # is clean for the next call
    plt.figure()
    plt.close('all')
|
||||
|
||||
|
||||
def write_last7_plt(x, y_1, y_2, x_ticks, plt_title):
    """Render the last-7-days plot to dyn/last-7.png.

    x: formatted timestamps; y_1: 2-hour averages; y_2: daily average
    curve; x_ticks: (positions, labels) pair.
    """
    # shared y limit over both series, rounded up to the next 50;
    # pd.concat replaces Series.append, which pandas 2.0 removed
    y_max = np.ceil(max(pd.concat([y_1, y_2]))/50)*50 + 50
    # plot
    plt.style.use('seaborn')
    plt.plot(x, y_1, color='#313131', label='2hour avg')
    plt.plot(x, y_2, color='#cc0000', label='daily avg')
    # stacked AQI category bands
    plt.fill_between(x, y_1, y2=0, where=(y_1 > 0), color='#85a762', interpolate=True) # good
    plt.fill_between(x, y_1, y2=50, where=(y_1 > 50), color='#d4b93c', interpolate=True) # moderate
    plt.fill_between(x, y_1, y2=100, where=(y_1 > 100), color='#e96843', interpolate=True) # ufsg
    plt.fill_between(x, y_1, y2=150, where=(y_1 > 150), color='#d03f3b', interpolate=True) # unhealthy
    plt.fill_between(x, y_1, y2=200, where=(y_1 > 200), color='#be4173', interpolate=True) # vunhealthy
    plt.fill_between(x, y_1, y2=300, where=(y_1 > 300), color='#714261', interpolate=True) # hazardous
    plt.fill_between(x, y_1, y2=0, where=(y_1 > 0), color='#ffffff', alpha=0.1, interpolate=True) # soft
    plt.xticks(x_ticks[0], x_ticks[1])
    plt.yticks(np.arange(0, y_max, step=50))
    plt.title(plt_title, fontsize=20)
    plt.legend()
    plt.tight_layout()
    plt.savefig('dyn/last-7.png', dpi = 300)
    # close as well as opening a new figure, matching write_plt(), so
    # figures don't accumulate between runs
    plt.close('all')
    plt.figure()
|
|
@ -1,215 +0,0 @@
|
|||
""" handles monthly tasks """
|
||||
|
||||
import calendar
|
||||
import json
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from matplotlib import pyplot as plt
|
||||
from os import path
|
||||
|
||||
from app.db_connect import db_connect, db_close
|
||||
|
||||
|
||||
def get_epoch():
    """Return epoch ranges for last month and the same month last year.

    Returns two (start, end) tuples of epoch-second *strings*: the
    previous calendar month, and that same month one year earlier.
    Meant to run within the first days of a new month.
    """
    now = datetime.now()
    # last month: first .. last second of the previous calendar month
    last_day = now.replace(day=1) - timedelta(days=1)
    month_start = last_day.replace(day=1, hour=0, minute=0, second=0)
    month_end = last_day.replace(hour=23, minute=59, second=59)
    # same month, one year back: jump past month end (+31 days always
    # lands in the following month), snap to its 1st, step back one day
    last_year = last_day.year - 1
    month_start_year = month_start.replace(year=last_year)
    m_start_year_next = month_start_year + timedelta(days=31)
    m_start_year_first = m_start_year_next.replace(day=1)
    month_end_year = (m_start_year_first - timedelta(days=1)).replace(hour=23, minute=59, second=59)

    def _epoch(stamp):
        # portable epoch string; strftime('%s') is a glibc-only extension
        return str(int(stamp.timestamp()))

    # build tpl and return
    last_month_tpl = (_epoch(month_start), _epoch(month_end))
    last_year_tpl = (_epoch(month_start_year), _epoch(month_end_year))
    return last_month_tpl, last_year_tpl
|
||||
|
||||
|
||||
def get_rows(last_month_tpl, last_year_tpl, config):
    """Fetch raw AQI rows for both month ranges from postgres.

    Each *_tpl argument is a (start, end) pair of epoch-second strings.
    Returns (rows_month, rows_year), each newest first.
    """

    def _select_range(cur, epoch_range):
        # one bounded range query, newest rows first
        cur.execute(
            f'SELECT epoch_time, aqi_value FROM aqi \
            WHERE epoch_time > {epoch_range[0]} \
            AND epoch_time < {epoch_range[1]} \
            ORDER BY epoch_time DESC;'
        )
        return cur.fetchall()

    conn, cur = db_connect(config)
    rows_month = _select_range(cur, last_month_tpl)
    rows_year = _select_range(cur, last_year_tpl)
    db_close(conn, cur)
    return rows_month, rows_year
|
||||
|
||||
|
||||
def get_axis(rows_month, rows_year):
    """Build comparison axes: last month vs the same month one year ago.

    rows_month / rows_year: lists of (epoch_time, aqi_value) tuples.
    Returns (x, y_1, y_2, mean): 'DD HH:MM' labels, this year's and last
    year's 8-hour AQI means, plus the merged dataframe (used for the
    monthly json table).
    """
    # initial df: this year's rows resampled to 8 hour means
    x_timeline = [datetime.fromtimestamp(i[0]) for i in rows_month]
    y_aqi_values = [int(i[1]) for i in rows_month]
    data = {'timestamp': x_timeline, 'now_aqi': y_aqi_values}
    df = pd.DataFrame(data)
    indexed = df.set_index('timestamp')
    indexed.sort_values(by=['timestamp'], inplace=True)
    mean = indexed.resample('8h').mean().round()
    # reindex on 'DD HH:MM' so both years share comparable labels
    mean.reset_index(level=0, inplace=True)
    mean['timestamp'] = mean['timestamp'].dt.strftime('%d %H:%M')
    mean.set_index('timestamp', inplace=True)
    # second df with last year data, same pipeline
    x_timeline = [datetime.fromtimestamp(i[0]) for i in rows_year]
    y_aqi_values = [int(i[1]) for i in rows_year]
    data = {'timestamp': x_timeline, 'year_aqi': y_aqi_values}
    df = pd.DataFrame(data)
    indexed = df.set_index('timestamp')
    indexed.sort_values(by=['timestamp'], inplace=True)
    year_mean = indexed.resample('8h').mean().round()
    # reset timestamp to day + time, same label format as above
    year_mean.reset_index(level=0, inplace=True)
    year_mean['timestamp'] = year_mean['timestamp'].dt.strftime('%d %H:%M')
    year_mean.set_index('timestamp', inplace=True)
    # merge the two on the shared 'DD HH:MM' index so matching
    # day/time slots of both years line up
    mean['year_aqi'] = year_mean['year_aqi']
    mean.reset_index(level=0, inplace=True)
    mean.sort_values(by='timestamp', ascending=True, inplace=True)
    # return axis
    x = mean['timestamp']
    y_1 = mean['now_aqi']
    y_2 = mean['year_aqi']
    return x, y_1, y_2, mean
|
||||
|
||||
|
||||
def write_monthly_plot(x, y_1, y_2, timestamp):
    """Render the monthly comparison plot to dyn/monthly/YYYY-MM.png.

    x: 'DD HH:MM' labels; y_1: this year's 8-hour means; y_2: last
    year's. timestamp: any epoch second inside the plotted month.
    """
    # parse timestamp into title parts and the output file name
    date_from = datetime.fromtimestamp(timestamp)
    date_title = date_from.strftime('%b %Y')
    month_short = date_from.strftime('%b')
    file_name = 'dyn/monthly/' + date_from.strftime('%Y-%m') + '.png'
    plt_title = f'AQI values for: {date_title}'
    # shared y limit; pd.concat replaces Series.append (removed in
    # pandas 2.0)
    y_max = np.ceil(max(pd.concat([y_1, y_2]))/50)*50 + 50
    # one tick every three days (9 eight-hour slots)
    x_range = np.arange(0, len(x), step=9)
    last_day = int(x.max().split()[0])
    x_numbers = np.arange(1, last_day + 1, step=3)
    x_dates = [f'{str(i).zfill(2)} {month_short}' for i in x_numbers]
    x_ticks = x_range, x_dates
    # plot
    plt.style.use('seaborn')
    plt.plot(x, y_1, color='#313131', label='this year')
    plt.plot(x, y_2, color='#666666', linestyle='dashed', label='last year')
    # stacked AQI category bands
    plt.fill_between(x, y_1, y2=0, where=(y_1 > 0), color='#85a762', interpolate=True) # good
    plt.fill_between(x, y_1, y2=50, where=(y_1 > 50), color='#d4b93c', interpolate=True) # moderate
    plt.fill_between(x, y_1, y2=100, where=(y_1 > 100), color='#e96843', interpolate=True) # ufsg
    plt.fill_between(x, y_1, y2=150, where=(y_1 > 150), color='#d03f3b', interpolate=True) # unhealthy
    plt.fill_between(x, y_1, y2=200, where=(y_1 > 200), color='#be4173', interpolate=True) # vunhealthy
    plt.fill_between(x, y_1, y2=300, where=(y_1 > 300), color='#714261', interpolate=True) # hazardous
    plt.fill_between(x, y_1, y2=0, where=(y_1 > 0), color='#ffffff', alpha=0.1, interpolate=True) # soft
    plt.xticks(x_ticks[0], x_ticks[1])
    plt.yticks(np.arange(0, y_max, step=50))
    plt.title(plt_title, fontsize=20)
    plt.legend()
    plt.tight_layout()
    plt.savefig(file_name, dpi = 300)
    # close as well, so figures don't accumulate between monthly runs
    plt.close('all')
    plt.figure()
|
||||
|
||||
|
||||
def get_change(curr, year):
    """Classify the relative change between current and last-year value.

    Returns 'up' / 'down' when the relative difference is at least 15%
    of the current value, 'same' otherwise.
    """
    if curr == 0:
        # guard the division below: with no current value to compare
        # against, any non-zero past value counts as a decrease
        return 'same' if year == 0 else 'down'
    diff_avg = (curr - year) / curr
    if diff_avg <= -0.15:
        avg_change = 'down'
    elif diff_avg >= 0.15:
        avg_change = 'up'
    else:
        avg_change = 'same'
    return avg_change
|
||||
|
||||
|
||||
def get_aqi(val):
    """Map an AQI value to its category name."""
    # upper bound of each category, in ascending order
    thresholds = (
        (50, 'Good'),
        (100, 'Moderate'),
        (150, 'Unhealthy for Sensitive Groups'),
        (200, 'Unhealthy'),
        (300, 'Very Unhealthy'),
    )
    for limit, category in thresholds:
        if val <= limit:
            return category
    # everything above 300
    return 'Hazardous'
|
||||
|
||||
|
||||
def write_monthly_json(mean, timestamp):
    """Write the monthly comparison table to dyn/monthly/YYYY-MM.json.

    mean: merged dataframe with 'now_aqi' and 'year_aqi' columns.
    timestamp: any epoch second inside the month being reported.
    """
    date_from = datetime.fromtimestamp(timestamp)
    file_name = 'dyn/monthly/' + date_from.strftime('%Y-%m') + '.json'
    # stats for this year's month
    curr_min = int(mean['now_aqi'].min())
    curr_max = int(mean['now_aqi'].max())
    curr_mean = int(mean['now_aqi'].mean())
    curr_cat = get_aqi(curr_mean)
    # stats for the same month last year
    year_min = int(mean['year_aqi'].min())
    year_max = int(mean['year_aqi'].max())
    year_mean = int(mean['year_aqi'].mean())
    year_cat = get_aqi(year_mean)
    # trend indicators
    min_change = get_change(curr_min, year_min)
    max_change = get_change(curr_max, year_max)
    mean_change = get_change(curr_mean, year_mean)
    # table rows: label, current, last year, trend
    data_rows = [
        ['min: ', curr_min, year_min, min_change],
        ['max: ', curr_max, year_max, max_change],
        ['avg: ', curr_mean, year_mean, mean_change],
        ['avg aqi: ', curr_cat, year_cat, mean_change],
    ]
    monthly_dict = {'data': data_rows}
    # write to disk
    with open(file_name, 'w') as f:
        f.write(json.dumps(monthly_dict))
|
||||
|
||||
|
||||
def monthly_found(timestamp):
    """Return True when the monthly graph for `timestamp` already exists on disk."""
    month_tag = datetime.fromtimestamp(timestamp).strftime('%Y-%m')
    return path.isfile('dyn/monthly/' + month_tag + '.png')
|
||||
|
||||
|
||||
def create_monthly(config):
    """Create last month's plot and json table unless they already exist."""
    last_month_tpl, last_year_tpl = get_epoch()
    timestamp = int(last_month_tpl[0])
    # guard clause: skip if this month's artefacts are already on disk
    if monthly_found(timestamp):
        print('monthly already created, skipping...')
        return
    print('creating monthly graph and json file')
    # pull both month ranges, derive the axes, then write plot and table
    rows_month, rows_year = get_rows(last_month_tpl, last_year_tpl, config)
    x, y_1, y_2, mean = get_axis(rows_month, rows_year)
    write_monthly_plot(x, y_1, y_2, timestamp)
    write_monthly_json(mean, timestamp)
|
|
@ -1,187 +0,0 @@
|
|||
""" creates the PM 2.5 and pm 10 graphs """
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from matplotlib import pyplot as plt
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
from app.db_connect import db_connect, db_close
|
||||
|
||||
|
||||
def color_colums(y):
    """Return one AQI category colour (hex string) per value in `y`."""
    # upper bound of each category and its colour, ascending
    bands = (
        (50, '#85a762'),   # good
        (100, '#d4b93c'),  # moderate
        (150, '#e96843'),  # ufsg
        (200, '#d03f3b'),  # unhealthy
        (300, '#be4173'),  # vunhealthy
    )

    def _color(val):
        for limit, color in bands:
            if val <= limit:
                return color
        return '#714261'   # hazardous

    return [_color(val) for val in y]
|
||||
|
||||
|
||||
def get_pm_data(config):
    """Fetch the last 10 days of raw PM2.5/PM10 rows from postgres.

    Returns a list of (epoch_time, pm25, pm10) tuples, newest first.
    """
    now = datetime.now()
    # midnight today as epoch; portable replacement for the glibc-only
    # strftime('%s')
    day_until = int(datetime.combine(now.date(), datetime.min.time()).timestamp())
    day_from = day_until - 10 * 24 * 60 * 60
    conn, cur = db_connect(config)
    cur.execute(
        f'SELECT epoch_time, pm25, pm10 FROM aqi \
        WHERE epoch_time > {day_from} \
        AND epoch_time < {day_until} \
        ORDER BY epoch_time DESC;'
    )
    rows = cur.fetchall()
    db_close(conn, cur)
    return rows
|
||||
|
||||
|
||||
def get_pm_axis(rows):
    """Turn raw (epoch, pm25, pm10) rows into daily-average plot axes.

    Returns (x, y_1, y_2): daily timestamps, rounded PM2.5 means and
    rounded PM10 means.
    """
    # raw rows -> dataframe
    frame = pd.DataFrame({
        'timestamp': [datetime.fromtimestamp(row[0]) for row in rows],
        'pm25': [int(row[1]) for row in rows],
        'pm10': [int(row[2]) for row in rows],
    })
    # time index, oldest first, one averaged value per day
    indexed = frame.set_index('timestamp')
    indexed.sort_values(by=['timestamp'], inplace=True, ascending=True)
    mean = indexed.resample('1d').mean()
    mean.reset_index(level=0, inplace=True)
    mean['pm25'] = mean['pm25'].round()
    mean['pm10'] = mean['pm10'].round()
    return mean['timestamp'], mean['pm25'], mean['pm10']
|
||||
|
||||
|
||||
def build_pm_plot(x, y, y_max, thresh, title):
    """Render one daily-average PM bar chart to dyn/pm<title>.png.

    x: daily timestamps; y: daily averages; y_max: shared y axis limit;
    thresh: limit drawn as a dotted line and used to colour the bars;
    title: '2.5' or '10' — also used (with the '.' stripped) in the
    output file name.
    """
    file_name = title.replace('.', '')
    # one tick per day of the 10 day window
    x_range = np.arange(10).tolist()
    x_date_time = pd.to_datetime(x).dt.date.unique()
    x_dates = [i.strftime('%d %b') for i in x_date_time]
    # green below the threshold, red at or above it
    col = []
    for val in y:
        if val < thresh:
            col.append('#6ecd65')
        else:
            col.append('#ff4d4d')
    # title
    plt_title = f'Daily avg PM {title} exposure'
    # plot
    # NOTE(review): the 'seaborn' style name was removed in matplotlib
    # >= 3.6 (now 'seaborn-v0_8') — confirm the pinned matplotlib version
    plt.style.use('seaborn')
    plt.bar(x_dates, y, color=col, width=0.5)
    plt.axhline(y=thresh, color='#6ecd65', linestyle=':')
    plt.xticks(ticks=x_range, labels=x_dates)
    plt.yticks(np.arange(0, y_max, step=25))
    plt.title(plt_title, fontsize=20)
    plt.tight_layout()
    plt.savefig(f'dyn/pm{file_name}.png', dpi=300)
    # reset pyplot's global state for the next chart
    plt.close('all')
    plt.figure()
|
||||
|
||||
|
||||
def rebuild_pm_bar(config):
    """Rebuild the daily-average PM2.5 and PM10 bar charts."""
    # get data
    rows = get_pm_data(config)
    x, y_1, y_2 = get_pm_axis(rows)
    # shared y limit rounded up to the next 25; pd.concat replaces
    # Series.append, which pandas 2.0 removed
    y_max = np.ceil(max(pd.concat([y_1, y_2]))/25)*25 + 25
    # pm 2.5
    build_pm_plot(x, y_1, y_max, thresh=25, title='2.5')
    # pm 10
    build_pm_plot(x, y_2, y_max, thresh=50, title='10')
    # done
    print('recreated PM 2.5 and PM 10 graphs')
|
||||
|
||||
|
||||
# hour bar chart
|
||||
def get_hour_data(config):
    """Fetch the last three days of raw AQI rows from postgres.

    Returns a list of (epoch_time, aqi_value) tuples, newest first.
    """
    # midnight today as epoch; portable replacement for the glibc-only
    # strftime('%s')
    now = datetime.now()
    day_until = int(datetime.combine(now.date(), datetime.min.time()).timestamp())
    day_from = day_until - 3 * 24 * 60 * 60
    # call db
    conn, cur = db_connect(config)
    cur.execute(
        f'SELECT epoch_time, aqi_value FROM aqi \
        WHERE epoch_time > {day_from} \
        AND epoch_time < {day_until} \
        ORDER BY epoch_time DESC;'
    )
    rows = cur.fetchall()
    db_close(conn, cur)
    return rows
|
||||
|
||||
|
||||
def get_hour_axis(rows):
    """Average AQI per hour-of-day over the supplied rows.

    rows: list of (epoch_time, aqi_value) tuples.
    Returns (x, y): hour of day and the rounded mean AQI for that hour.
    """
    # raw rows -> dataframe
    frame = pd.DataFrame({
        'timestamp': [datetime.fromtimestamp(row[0]) for row in rows],
        'aqi': [int(row[1]) for row in rows],
    })
    # hourly means first, then regroup across days by hour of day
    indexed = frame.set_index('timestamp')
    indexed.sort_values(by=['timestamp'], inplace=True)
    mean = indexed.resample('1h').mean()
    mean_hour = mean.groupby([mean.index.hour]).mean()
    mean_hour.reset_index(level=0, inplace=True)
    return mean_hour['timestamp'], mean_hour['aqi'].round()
|
||||
|
||||
|
||||
def build_hour_plot(x, y):
    """Render the average-AQI-per-hour bar chart to dyn/hours.png.

    x: hour of day (0-23); y: average AQI for that hour.
    """
    plt_title = 'Last three days average AQI for each hour'
    # one tick every three hours, labelled 'HH:00'
    x_range = np.arange(0, 24, step=3)
    x_hours = [str(i).zfill(2) + ":00" for i in x_range]
    # y limit: next multiple of 50 above the data, plus headroom
    y_max = np.ceil(max(y)/50) * 50 + 50
    # colour each bar by its AQI category
    col = color_colums(y)
    # create plot
    # NOTE(review): the 'seaborn' style name was removed in matplotlib
    # >= 3.6 (now 'seaborn-v0_8') — confirm the pinned matplotlib version
    plt.style.use('seaborn')
    plt.bar(x, y, color=col, width=0.5)
    plt.yticks(np.arange(0, y_max, step=50))
    plt.xticks(ticks=x_range, labels=x_hours)
    plt.title(plt_title, fontsize=20)
    plt.tight_layout()
    plt.savefig('dyn/hours.png', dpi=300)
    # reset pyplot's global state for the next chart
    plt.close('all')
    plt.figure()
|
||||
|
||||
|
||||
def rebuild_hour_bar(config):
    """Rebuild the average-AQI-per-hour bar chart from the last three days."""
    # fetch, aggregate by hour of day, render
    rows = get_hour_data(config)
    build_hour_plot(*get_hour_axis(rows))
|
|
@ -1,140 +0,0 @@
|
|||
""" recreate json file to populate last year comparison table """
|
||||
|
||||
from datetime import datetime
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
from matplotlib import pyplot as plt
|
||||
|
||||
from app.db_connect import db_connect, db_close
|
||||
from app.graph_pm import color_colums
|
||||
|
||||
|
||||
def get_rows(config):
    """Fetch AQI rows for the last 7 days and the same week last year.

    Returns (now_rows, year_rows), each a list of (epoch_time,
    aqi_value) tuples, newest first.
    """
    now = datetime.now()
    # last 7 days; midnight today as epoch — portable replacement for
    # the glibc-only strftime('%s')
    now_until = int(datetime.combine(now.date(), datetime.min.time()).timestamp())
    now_from = now_until - 7 * 24 * 60 * 60
    # same 7-day window shifted back 365 days
    year_until = now_until - 365 * 24 * 60 * 60
    year_from = now_until - 372 * 24 * 60 * 60
    # make the call
    conn, cur = db_connect(config)
    cur.execute(
        f'SELECT epoch_time, aqi_value FROM aqi \
        WHERE epoch_time > {now_from} \
        AND epoch_time < {now_until} \
        ORDER BY epoch_time DESC;'
    )
    now_rows = cur.fetchall()
    cur.execute(
        f'SELECT epoch_time, aqi_value FROM aqi \
        WHERE epoch_time > {year_from} \
        AND epoch_time < {year_until} \
        ORDER BY epoch_time DESC;'
    )
    year_rows = cur.fetchall()
    # close and return
    db_close(conn, cur)
    return now_rows, year_rows
|
||||
|
||||
|
||||
def _daily_mean(rows, col_name):
    """Build a daily-mean dataframe from (epoch_time, aqi_value) rows.

    rows     : list of (epoch_time, aqi_value) tuples
    col_name : column name for the aqi values in the result
    return   : dataframe with a RangeIndex and columns
               ['timestamp', col_name], one row per calendar day,
               values resampled to daily means and rounded
    """
    timeline = [datetime.fromtimestamp(i[0]) for i in rows]
    aqi_values = [int(i[1]) for i in rows]
    df = pd.DataFrame({'timestamp': timeline, col_name: aqi_values})
    indexed = df.set_index('timestamp')
    indexed.sort_values(by=['timestamp'], inplace=True)
    mean = indexed.resample('1d').mean().round()
    mean.reset_index(level=0, inplace=True)
    return mean


def initial_df(now_rows, year_rows):
    """Build mean df with last year's data merged in as a column.

    now_rows  : (epoch_time, aqi_value) tuples from the current window
    year_rows : same shape, from the corresponding window last year
    return    : dataframe with columns [timestamp, now_aqi, year_aqi],
                sorted newest first, timestamp formatted as 'DD Mon'
    """
    mean = _daily_mean(now_rows, 'now_aqi')
    year_mean = _daily_mean(year_rows, 'year_aqi')
    # positional merge: row i of this year's window is paired with row i
    # of last year's window — assumes both windows resample to the same
    # number of days (TODO confirm when a window has missing days)
    mean['year_aqi'] = year_mean['year_aqi']
    mean.sort_values(by='timestamp', ascending=False, inplace=True)
    mean['timestamp'] = mean['timestamp'].dt.strftime('%d %b')
    # return result
    return mean
|
||||
|
||||
|
||||
def write_df(mean):
    """Finalize the comparison df and write it to dyn/year-table.json.

    mean : dataframe from initial_df with columns
           [timestamp, now_aqi, year_aqi]; mutated in place (a 'change'
           column is added before the local copy is extended/written).
    Classifies each day as up/down/same vs last year, prepends a
    7-day average row and writes the table as json (orient='split').
    """
    # build temp column with the relative diff vs last year
    # NOTE(review): divides by now_aqi — a 0 value yields inf, and a NaN
    # diff (missing year data) passes through all three np.where calls
    # unchanged; confirm upstream data can never produce those
    mean['diff'] = (mean['now_aqi'] - mean['year_aqi']) / mean['now_aqi']
    # classify: within +/-15% counts as 'same', otherwise down/up
    # (order matters: the later np.where calls overwrite the raw diff)
    mean['change'] = np.where(mean['diff'].abs() < 0.15, 'same', mean['diff'])
    mean['change'] = np.where(mean['diff'] <= -0.15, 'down', mean['change'])
    mean['change'] = np.where(mean['diff'] >= 0.15, 'up', mean['change'])
    del mean['diff']
    # build average row on top using the same +/-15% threshold
    now_avg = mean['now_aqi'].mean()
    year_avg = mean['year_aqi'].mean()
    diff_avg = (now_avg - year_avg) / now_avg
    if diff_avg <= -0.15:
        avg_change = 'down'
    elif diff_avg >= 0.15:
        avg_change = 'up'
    else:
        avg_change = 'same'

    # build avg df and prepend it to the daily rows
    avg_row = {'timestamp': 'avg 7 days', 'now_aqi': now_avg, 'year_aqi': year_avg, 'change': avg_change}
    new_row = pd.DataFrame(avg_row, index = [0]).round()
    mean = pd.concat([new_row, mean]).reset_index(drop = True)
    # convert to int so the json carries whole aqi values
    mean['now_aqi'] = mean['now_aqi'].astype('int')
    mean['year_aqi'] = mean['year_aqi'].astype('int')
    # extract and write json from df for the frontend table
    mean_json = mean.to_json(orient='split')
    with open('dyn/year-table.json', 'w') as f:
        f.write(mean_json)
|
||||
|
||||
|
||||
def write_graph(mean):
    """Recreate the bar chart comparing daily AQI with last year.

    mean : dataframe with columns [timestamp, now_aqi, year_aqi, ...]
           as produced by initial_df (plus write_df's additions).
    Writes dyn/year-graph.png.
    """
    # build axis: restore chronological order (initial_df sorted desc)
    mean.sort_index(inplace=True)
    x = mean['timestamp'].to_list()
    y_1 = mean['now_aqi'].to_list()
    y_2 = mean['year_aqi'].to_list()
    # build color lists: one aqi-category color per bar
    col_y_1 = color_colums(y_1)
    col_y_2 = color_colums(y_2)
    # set ticks: next multiple of 50 above the max, plus headroom
    y_max = int(np.ceil(max(y_1 + y_2)/50) * 50 + 50)
    x_indexes = np.arange(len(x))
    # build plot: paired bars, this year left, last year right
    width = 0.25
    plt_title = 'Daily avg AQI values compared to last year'
    plt_suptitle = 'left: this year, right: last year'
    plt.style.use('seaborn')
    # write bars
    plt.bar(x_indexes - (width / 2) - 0.02, y_1, color=col_y_1, width=width)
    plt.bar(x_indexes + (width / 2) + 0.02, y_2, color=col_y_2, width=width)
    plt.title(plt_suptitle,fontsize=15)
    plt.suptitle(plt_title,fontsize=20, y=0.96)
    plt.yticks(np.arange(0, y_max, step=50))
    plt.xticks(ticks=x_indexes, labels=x)
    plt.tight_layout()
    plt.savefig('dyn/year-graph.png', dpi=300)
    # close figures so repeated scheduler runs don't accumulate open
    # figures (matches build_hour_plot, which was already closing)
    plt.close('all')
    plt.figure()
|
||||
|
||||
|
||||
def rebuild_table(config):
    """ main function to recreate year comparison table """
    # query the current and last-year windows from postgres
    now_rows, year_rows = get_rows(config)
    # daily means of both windows, merged side by side
    mean = initial_df(now_rows, year_rows)
    # writes dyn/year-table.json (also mutates mean: adds change column)
    write_df(mean)
    # writes dyn/year-graph.png
    write_graph(mean)
    # done
    print('recreated year comparison graph and json file')
|
|
@ -1,124 +0,0 @@
|
|||
import configparser
|
||||
import json
|
||||
|
||||
from flask import request
|
||||
from flask_httpauth import HTTPBasicAuth
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
|
||||
|
||||
from app import app
|
||||
from app import aqi_parser
|
||||
from app import weather
|
||||
from app import graph
|
||||
from app import graph_pm
|
||||
from app import table_export
|
||||
from app import graph_monthly
|
||||
from app.db_connect import db_insert
|
||||
|
||||
|
||||
def get_config():
    """Read out the 'config' ini file and flatten it into a dict.

    return: dict keyed by option name (authUsername, authPassword,
    api_key, lat, lon, db_host, db_database, db_user, db_password),
    all values as strings. Raises configparser.NoSectionError /
    NoOptionError when the file is missing entries.
    """
    # parse
    config_parser = configparser.ConfigParser()
    config_parser.read('config')
    # (section, option) pairs to flatten — option names are unique
    # across sections, so the option alone is a safe dict key
    wanted = [
        ('aqi_monitor', 'authUsername'),
        ('aqi_monitor', 'authPassword'),
        ('openweathermap', 'api_key'),
        ('openweathermap', 'lat'),
        ('openweathermap', 'lon'),
        ('postgres', 'db_host'),
        ('postgres', 'db_database'),
        ('postgres', 'db_user'),
        ('postgres', 'db_password'),
    ]
    return {option: config_parser.get(section, option) for section, option in wanted}
|
||||
|
||||
|
||||
# start up: read the config, then build every graph and cached file once
# so the frontend has fresh content immediately after a restart
auth = HTTPBasicAuth()
config = get_config()
weather.handle_weather(config)
graph.create_current(config)
graph_pm.rebuild_pm_bar(config)
graph.rebuild_3days(config)
graph.rebuild_7days(config)
graph_pm.rebuild_hour_bar(config)
table_export.rebuild_table(config)
graph_monthly.create_monthly(config)

# build username / pw dict for basic auth (single configured user)
USER_DATA = {}
USER_DATA[config['authUsername']] = config['authPassword']


# start scheduler: weather every 15 min, current graph every 5 min,
# heavier daily rebuilds staggered one minute apart from 01:01
scheduler = BackgroundScheduler()
scheduler.add_job(
    weather.handle_weather, args=[config], trigger="interval", name='weather_api', seconds=900
)
scheduler.add_job(
    graph.create_current, args=[config], trigger="cron", minute='*/5', name='current_graph'
)
scheduler.add_job(
    graph.rebuild_3days, args=[config], trigger="cron", day='*', hour='1', minute='1', name='3_days'
)
scheduler.add_job(
    graph.rebuild_7days, args=[config], trigger="cron", day='*', hour='1', minute='2', name='7_days'
)
scheduler.add_job(
    graph_pm.rebuild_pm_bar, args=[config], trigger="cron", day='*', hour='1', minute='3', name='pm_bar'
)
scheduler.add_job(
    graph_pm.rebuild_hour_bar, args=[config], trigger="cron", day='*', hour='1', minute='4', name='hour_bar'
)
scheduler.add_job(
    table_export.rebuild_table, args=[config], trigger="cron", day='*', hour='1', minute='6', name='rebuild_table'
)
scheduler.add_job(
    graph_monthly.create_monthly, args=[config], trigger="cron", day='*', hour='1', minute='7', name='create_monthly'
)
scheduler.start()
|
||||
|
||||
|
||||
@auth.verify_password
def verify(username, password):
    """Basic-auth callback: accept only the configured user/password pair.

    Returns False for empty credentials or any mismatch against USER_DATA.
    """
    if not username or not password:
        return False
    expected = USER_DATA.get(username)
    return expected == password
|
||||
|
||||
|
||||
# ingest
|
||||
# ingest
@app.route('/ingest', methods=['POST'])
@auth.login_required
def ingest():
    """Receive a sensor reading as a json POST (basic auth required),
    enrich it with cached weather data, store it in postgres and cache
    it to dyn/air.json for the / route.

    Always returns the plain string 'ingest'; a body that is not json
    (or an empty one) is silently ignored.
    """
    data = request.json
    if data:
        # populate data dict: combine aqi values with cached weather
        json_dict, error_found = aqi_parser.input_process(data)
        if error_found:
            # pm25 came back as 0 — log and skip the db insert
            print('pm25 read failed')
            print(json_dict)
        else:
            # save to db
            time_stamp = db_insert(config, json_dict)
            print(f'db insert done at {time_stamp}')
            # save to webserver cache served by home()
            data = json.dumps(json_dict)
            with open('dyn/air.json', 'w') as f:
                f.write(data)
            print(data)
    return 'ingest'
|
||||
|
||||
|
||||
# output
|
||||
# output
@app.route('/')
def home():
    """Serve the most recent cached sensor reading as a json string."""
    try:
        with open('dyn/air.json', 'r') as f:
            return f.read()
    except FileNotFoundError:
        # cache file gets regenerated on the next ingest;
        # serve an empty json object in the meantime
        return '{}'
|
|
@ -1,48 +0,0 @@
|
|||
""" get data from openweathermap.org """
|
||||
|
||||
from datetime import datetime
|
||||
from time import sleep
|
||||
import json
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
def get_weather(config):
    """
    Get the current weather from the openweathermap api.

    config : dict with api_key, lat, lon
    return : (weather_json, timestamp) — json string with weather name,
             icon, wind values and timestamps, plus the human-readable
             timestamp for logging
    """
    api_key = config['api_key']
    lat = config['lat']
    lon = config['lon']
    # get data; let requests build and encode the query string
    url = 'https://api.openweathermap.org/data/2.5/weather'
    params = {'units': 'metric', 'appid': api_key, 'lat': lat, 'lon': lon}
    response = requests.get(url, params=params, timeout=20)
    # format data: pick out the fields the frontend uses
    r_json = response.json()
    weather_name = r_json['weather'][0]['main']
    weather_icon = r_json['weather'][0]['icon']
    wind_speed = r_json['wind']['speed']
    wind_direction = r_json['wind']['deg']
    # timestamp; timestamp() is portable, unlike the non-standard
    # strftime('%s') extension
    now = datetime.now()
    timestamp = now.strftime("%Y-%m-%d %H:%M:%S")
    epoch_time = int(now.timestamp())
    # form dict
    json_dict = {
        'weather_name': weather_name,
        'weather_icon': weather_icon,
        'wind_speed': wind_speed,
        'wind_direction': wind_direction,
        'timestamp': timestamp,
        'epoch_time': epoch_time,
    }
    # return json string
    weather_json = json.dumps(json_dict)
    return weather_json, timestamp
|
||||
|
||||
|
||||
def handle_weather(config):
    """Fetch the current weather once and cache it to dyn/weather.json.

    Called at startup and then every 15 minutes by the background
    scheduler; aqi_parser reads the cached file when processing sensor
    posts. (Despite the old wording, this does not loop itself.)
    """
    weather_json, timestamp = get_weather(config)
    with open('dyn/weather.json', 'w') as f:
        f.write(weather_json)
    print(f'weather data updated: {timestamp}')
|
|
@ -1,14 +0,0 @@
|
|||
[aqi_monitor]
|
||||
authUsername = username
|
||||
authPassword = password
|
||||
|
||||
[openweathermap]
|
||||
api_key = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
||||
lat = 40.71
|
||||
lon = -73.99
|
||||
|
||||
[postgres]
|
||||
db_host = postgres
|
||||
db_database = aqi
|
||||
db_user = aqi
|
||||
db_password = aaaaaaaaaaaaaaaaa
|
|
@ -1,26 +0,0 @@
|
|||
"""
|
||||
|
||||
Documentation: https://flask.palletsprojects.com
|
||||
|
||||
Install on Arch
|
||||
- sudo pacman -S python-flask
|
||||
- sudo pacman -S python-flask-httpauth
|
||||
|
||||
or with pip
|
||||
- pip install Flask
|
||||
- pip install Flask-HTTPAuth
|
||||
|
||||
|
||||
Run Debug env:
|
||||
export FLASK_APP=run.py
|
||||
export FLASK_DEBUG=1
|
||||
|
||||
"""
|
||||
|
||||
|
||||
from app import app
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run()
|
|
@ -4,12 +4,9 @@
|
|||
rsync --progress -a docker-compose.yml vps2:docker/
|
||||
rsync --progress -a env vps2:docker/
|
||||
rsync --progress -a --delete-after helper_scripts vps2:docker/
|
||||
rsync --progress -a --delete-after \
|
||||
--exclude dyn --exclude config.sample --exclude __pycache__ \
|
||||
backend vps2:docker/
|
||||
rsync --progress -a --delete-after \
|
||||
--exclude dyn \
|
||||
frontend vps2:docker/
|
||||
rsync --progress -a --delete-after nginx vps2:docker/
|
||||
rsync --progress -a --delete-after --exclude config.json.sample --exclude **/__pycache__ --exclude static/dyn \
|
||||
web vps2:docker/
|
||||
|
||||
##
|
||||
exit 0
|
||||
|
|
|
@ -34,26 +34,28 @@ services:
|
|||
- "com.github.jrcs.letsencrypt_nginx_proxy_companion.nginx_proxy"
|
||||
# backend flask
|
||||
flask:
|
||||
build: ./backend/flask
|
||||
build: ./web
|
||||
container_name: flask
|
||||
restart: always
|
||||
volumes:
|
||||
- ./volume/flask/dyn:/app/dyn
|
||||
- ./volume/flask/dyn:/app/static/dyn
|
||||
environment:
|
||||
- APP_NAME=FlaskBackend
|
||||
- APP_NAME=FlaskAqi
|
||||
- TZ=Asia/Bangkok
|
||||
expose:
|
||||
- "8080"
|
||||
# backend nginx
|
||||
# nginx infront of uwsgi
|
||||
nginx:
|
||||
build: ./backend/nginx
|
||||
build: ./nginx
|
||||
container_name: backend_nginx
|
||||
restart: always
|
||||
environment:
|
||||
- VIRTUAL_HOST=data.lpb-air.com
|
||||
- LETSENCRYPT_HOST=data.lpb-air.com
|
||||
- VIRTUAL_HOST=www.lpb-air.com,lpb-air.com
|
||||
- LETSENCRYPT_HOST=www.lpb-air.com,lpb-air.com
|
||||
expose:
|
||||
- "80"
|
||||
depends_on:
|
||||
- flask
|
||||
# backend postgres
|
||||
postgres:
|
||||
image: postgres
|
||||
|
@ -65,29 +67,3 @@ services:
|
|||
- ./env/postgres.env
|
||||
expose:
|
||||
- "5432"
|
||||
# php fast-cgi
|
||||
php:
|
||||
image: php:7-fpm
|
||||
container_name: php-fpm
|
||||
restart: always
|
||||
volumes:
|
||||
- ./frontend/nginx:/air
|
||||
- type: bind
|
||||
source: ./volume/flask/dyn
|
||||
target: /air/dyn
|
||||
# lpb-air
|
||||
air:
|
||||
image: nginx
|
||||
container_name: lpb-air_nginx
|
||||
restart: always
|
||||
expose:
|
||||
- "80"
|
||||
volumes:
|
||||
- type: bind
|
||||
source: ./volume/flask/dyn
|
||||
target: /air/dyn
|
||||
- ./frontend/nginx:/air:rw
|
||||
- ./frontend/nginx.conf:/etc/nginx/conf.d/default.conf
|
||||
environment:
|
||||
- VIRTUAL_HOST=www.lpb-air.com,lpb-air.com
|
||||
- LETSENCRYPT_HOST=www.lpb-air.com,lpb-air.com
|
||||
|
|
|
@ -1,23 +0,0 @@
|
|||
server {
|
||||
listen 80;
|
||||
index index.php index.html;
|
||||
server_name localhost;
|
||||
error_log /var/log/nginx/error.log;
|
||||
access_log /var/log/nginx/access.log;
|
||||
root /air;
|
||||
|
||||
location ~ \.php$ {
|
||||
try_files $uri =404;
|
||||
fastcgi_split_path_info ^(.+\.php)(/.+)$;
|
||||
fastcgi_pass php:9000;
|
||||
fastcgi_index index.php;
|
||||
include fastcgi_params;
|
||||
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
|
||||
fastcgi_param PATH_INFO $fastcgi_path_info;
|
||||
}
|
||||
}
|
||||
|
||||
server {
|
||||
server_name lpb-air.com;
|
||||
return 301 https://www.lpb-air.com$request_uri;
|
||||
}
|
|
@ -1,92 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<link rel="stylesheet" href="/css/style.css">
|
||||
<title>About</title>
|
||||
<script src="/js/aqi.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="preload">
|
||||
<img src="/img/cloud_colors.gif" alt="cloud_animation">
|
||||
</div>
|
||||
<div class="block_wrap light_background">
|
||||
<?php include($_SERVER['DOCUMENT_ROOT'] . '/incl/topnav.php'); ?>
|
||||
</div>
|
||||
<div class="block_wrap">
|
||||
<div class="content">
|
||||
<h2>About</h2>
|
||||
<p>This page and its contents are still under construction. More content is coming soon.</p>
|
||||
<p>The data for this page is collected from an air monitor located just outside of Luang Prabang, Laos. While we do our best, no guarantee is given for the accuracy of this data.</p>
|
||||
<p>The data is updated every 3 minutes. Contrary to international websites who measure the air pollution via satellite images and rely on estimates and averages, an on-site air monitor delivers real time values that paint a much more accurate picture of the local situation.</p>
|
||||
<p>Roughly, the Air Quality Index (AQI) is the internationally used air quality standard to measure the pollution of the air. It is divided into 6 levels, and according to these levels, certain health advices are given:</p>
|
||||
</div>
|
||||
<div class="aqidesc content">
|
||||
<div class="aqirow">
|
||||
<div class="leftcolumn">
|
||||
<p>Aqi Values</p>
|
||||
</div>
|
||||
<div class="rightcolumn hide">
|
||||
<p>Description</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="aqirow">
|
||||
<div class="leftcolumn category-class good">
|
||||
<p>0 to 50:</p>
|
||||
</div>
|
||||
<div class="rightcolumn">
|
||||
<p>Good: No health concerns, enjoy activities.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="aqirow">
|
||||
<div class="leftcolumn category-class moderate">
|
||||
<p>51 - 100:</p>
|
||||
</div>
|
||||
<div class="rightcolumn">
|
||||
<p>Moderate: Active children and adults, and people with respiratory disease, such as asthma, should limit prolonged outdoor exertion.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="aqirow">
|
||||
<div class="leftcolumn category-class ufsg">
|
||||
<p>101 - 150:</p>
|
||||
</div>
|
||||
<div class="rightcolumn">
|
||||
<p>Unhealthy for Sensitive Groups: Active children and adults, and people with respiratory disease, such as asthma, should limit prolonged outdoor exertion.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="aqirow">
|
||||
<div class="leftcolumn category-class unhealthy">
|
||||
<p>151 - 200:</p>
|
||||
</div>
|
||||
<div class="rightcolumn">
|
||||
<p>Unhealthy: Everyone may begin to experience health effects: Active children and adults, and people with respiratory disease, such as asthma, should avoid prolonged outdoor exertion; everyone else, especially children, should limit prolonged outdoor exertion</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="aqirow">
|
||||
<div class="leftcolumn category-class vunhealthy">
|
||||
<p>201 - 300:</p>
|
||||
</div>
|
||||
<div class="rightcolumn">
|
||||
<p>Very Unhealthy: Active children and adults, and people with respiratory disease, such as asthma, should avoid all outdoor exertion; everyone else, especially children, should limit outdoor exertion.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="aqirow">
|
||||
<div class="leftcolumn category-class hazardous">
|
||||
<p>301 - 500:</p>
|
||||
</div>
|
||||
<div class="rightcolumn">
|
||||
<p>Hazardous: Everyone should avoid all outdoor exertion.</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="credits content">
|
||||
<h2>Credits</h2>
|
||||
<p>Partial Weather data, namely weather icon, weather description and windspeed are provided from <a href="https://openweathermap.org/ " target="_blank">openweather.org</a> API distributed under the <a href="https://openweathermap.org/full-price" target="_blank">Creative Commons Attribution-ShareAlike 4.0 Generic License</a>.</p>
|
||||
<p><a target="_blank" href="https://github.com/lokesh/lightbox2">Lightbox</a> made by Lokesh Dhakar, released under the <a target="_blank" href="https://raw.githubusercontent.com/lokesh/lightbox2/master/LICENSE">MIT license</a>.</p>
|
||||
</div>
|
||||
</div>
|
||||
<?php include($_SERVER['DOCUMENT_ROOT'] . '/incl/footer.html'); ?>
|
||||
</body>
|
||||
</html>
|
|
@ -1,476 +0,0 @@
|
|||
@font-face {
|
||||
font-family: Rubik-Bold;
|
||||
src: url(/font/Rubik-Bold.ttf);
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: Rubik-Light;
|
||||
src: url(/font/Rubik-Light.ttf);
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: Rubik-Regular;
|
||||
src: url(/font/Rubik-Regular.ttf);
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-family: Rubik-Bold;
|
||||
font-size: 3em;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-family: Rubik-Bold;
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-family: Rubik-Light;
|
||||
font-size: 1.3em;
|
||||
}
|
||||
|
||||
p, li, td, th {
|
||||
font-family: Rubik-Regular;
|
||||
font-size: 1.1em;
|
||||
}
|
||||
|
||||
.preload {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
width: 100%;
|
||||
height: 100vh;
|
||||
background: #e6e6e6;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
.preload img {
|
||||
width: 100px;
|
||||
}
|
||||
|
||||
.preload-finish {
|
||||
opacity: 0;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
a {
|
||||
color: inherit;
|
||||
font-family: Rubik-Regular;
|
||||
}
|
||||
|
||||
.content {
|
||||
width: 70%;
|
||||
max-width: 900px;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
.content-date {
|
||||
display: inline-block;
|
||||
min-width: 90px;
|
||||
}
|
||||
|
||||
.colorbox {
|
||||
color: #fff;
|
||||
min-height: 40px;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
top: 0;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
.colorbox p {
|
||||
margin: 0;
|
||||
padding: 10px 0;
|
||||
}
|
||||
|
||||
.colorbox ul {
|
||||
margin: 0;
|
||||
padding-bottom: 10px;
|
||||
}
|
||||
|
||||
.top_content {
|
||||
display: flex;
|
||||
flex-wrap: nowrap;
|
||||
}
|
||||
|
||||
.nav {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
flex: 20%;
|
||||
}
|
||||
|
||||
.nav ul {
|
||||
list-style-type:none;
|
||||
}
|
||||
|
||||
.nav li {
|
||||
padding: 5px 40px;
|
||||
margin: 5px;
|
||||
border-style: none none solid none;
|
||||
border-width: 2px;
|
||||
}
|
||||
|
||||
.nav a {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.title {
|
||||
flex: 80%;
|
||||
}
|
||||
|
||||
.divider {
|
||||
padding: 30px 0;
|
||||
}
|
||||
|
||||
.divider hr {
|
||||
border: 1px solid;
|
||||
}
|
||||
|
||||
.cloud {
|
||||
position: relative;
|
||||
padding-bottom: 30px;
|
||||
}
|
||||
|
||||
.cloud img {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.aqi_box {
|
||||
position: absolute;
|
||||
top: 45%;
|
||||
left: 57%;
|
||||
text-align: right;
|
||||
transform: translate(-50%, -50%);
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.aqi_box h1 {
|
||||
font-size: 15em;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.aqi_box h2 {
|
||||
font-family: Rubik-Light;
|
||||
font-size: 1.8em;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.aqi_box p {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
/* weather styling */
|
||||
.block_wrap {
|
||||
margin-bottom: 40px;
|
||||
}
|
||||
|
||||
.weather_box {
|
||||
border: solid 2px;
|
||||
border-radius: 20px;
|
||||
height: 100px;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.weather_content {
|
||||
display: grid;
|
||||
grid-template-columns: auto auto auto auto;
|
||||
grid-column-gap: 10px;
|
||||
}
|
||||
|
||||
.weather_icon {
|
||||
width: 50px;
|
||||
padding: 10px;
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
transform: translateY(-50%);
|
||||
}
|
||||
|
||||
.weather_icon img {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.weather_text {
|
||||
width: 60%;
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
left: 40%;
|
||||
transform: translateY(-50%);
|
||||
}
|
||||
|
||||
.weather_text h3,
|
||||
.weather_text p {
|
||||
margin: 10px 0;
|
||||
}
|
||||
|
||||
/* descriptions */
|
||||
.desc_wrap {
|
||||
margin: 40px 0;
|
||||
padding: 30px 0;
|
||||
}
|
||||
|
||||
.desc_content {
|
||||
padding-top: 30px;
|
||||
padding-bottom: 30px;
|
||||
display: grid;
|
||||
grid-template-columns: 33% 33% 33%;
|
||||
grid-column-gap: 10px;
|
||||
}
|
||||
|
||||
.desc_box {
|
||||
margin: auto;
|
||||
}
|
||||
|
||||
.desc_item_wrap {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.desc_item {
|
||||
padding: 8px 10px;
|
||||
margin: 4px;
|
||||
color: #fff;
|
||||
text-align: center;
|
||||
width: 200px;
|
||||
transform: scale(1);
|
||||
transition-timing-function: ease;
|
||||
-webkit-transition: transform 2s;
|
||||
-moz-transition: transform 2s;
|
||||
-ms-transition: transform 2s;
|
||||
-o-transition: transform 2s;
|
||||
transition: transform 2s;
|
||||
}
|
||||
|
||||
.good {
|
||||
background-color: #85a762;
|
||||
}
|
||||
.moderate {
|
||||
background-color: #d4b93c;
|
||||
}
|
||||
.ufsg {
|
||||
background-color: #e96843;
|
||||
}
|
||||
.unhealthy {
|
||||
background-color: #d03f3b;
|
||||
}
|
||||
.vunhealthy {
|
||||
background-color: #be4173;
|
||||
}
|
||||
.hazardous {
|
||||
background-color: #714261;
|
||||
}
|
||||
|
||||
.category_icon {
|
||||
padding: 30px;
|
||||
}
|
||||
.category_icon img {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.tagline_content {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.desc_item.active {
|
||||
transform: scale(1.2);
|
||||
font-size: 1.1em;
|
||||
margin: 15px;
|
||||
box-shadow: darkgray 5px 5px;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
/* table */
|
||||
.year-table {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.year-table table{
|
||||
width: 100%;
|
||||
max-width: 500px;
|
||||
table-layout: fixed;
|
||||
margin: auto;
|
||||
}
|
||||
|
||||
.year-table thead th {
|
||||
padding: 5px 0;
|
||||
background-color: #eeeeee;
|
||||
}
|
||||
|
||||
.year-table tbody td:nth-child(1) {
|
||||
padding: 3px 0 3px 10px;
|
||||
background-color: #eeeeee;
|
||||
}
|
||||
|
||||
.year-table td:nth-child(2),
|
||||
.year-table td:nth-child(3),
|
||||
.year-table td:nth-child(4) {
|
||||
text-align: center;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
/* about */
|
||||
.aqirow {
|
||||
display: flex;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
margin: 10px auto;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.leftcolumn {
|
||||
width: 150px;
|
||||
margin: 0;
|
||||
padding: 10px 15px;
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.leftcolumn.category-class {
|
||||
font-weight: bold;
|
||||
color: #FFFFFF;
|
||||
}
|
||||
.rightcolumn {
|
||||
width: 100%;
|
||||
margin: 0px;
|
||||
padding: 10px;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.credits {
|
||||
padding-top: 30px;
|
||||
padding-bottom: 30px;
|
||||
}
|
||||
|
||||
.leftcolumn p,
|
||||
.rightcolumn p {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
align-items: center;
|
||||
display: flex;
|
||||
}
|
||||
|
||||
/* graphs */
|
||||
.graph2 {
|
||||
display: grid;
|
||||
grid-template-columns: 50% 50%;
|
||||
grid-column-gap: 10px;
|
||||
}
|
||||
|
||||
.graph3 {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.graph_item img {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
/* footer */
|
||||
.footer_wrap {
|
||||
height: 50px;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
|
||||
/* responsiv */
|
||||
@media screen and (max-width: 1100px) {
|
||||
.top_content,
|
||||
.tagline_content,
|
||||
.weather_content {
|
||||
width: 90%;
|
||||
}
|
||||
.desc_content {
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
}
|
||||
.desc_box:nth-child(1) {
|
||||
grid-column: span 1;
|
||||
grid-row: span 2;
|
||||
}
|
||||
.desc_box:nth-child(2),
|
||||
.desc_box:nth-child(3) {
|
||||
grid-column: span 1;
|
||||
grid-row: span 1;
|
||||
}
|
||||
}
|
||||
|
||||
@media screen and (max-width: 800px) {
|
||||
h1 {
|
||||
font-size: 2.5em;
|
||||
}
|
||||
.top_content {
|
||||
flex-direction: column-reverse;
|
||||
}
|
||||
.content {
|
||||
width: 95%;
|
||||
}
|
||||
.nav {
|
||||
flex-wrap: nowrap;
|
||||
}
|
||||
.nav li {
|
||||
display: inline-flex;
|
||||
padding: 5px;
|
||||
}
|
||||
.nav ul {
|
||||
margin: 10px auto;
|
||||
padding: 0;
|
||||
}
|
||||
.title h1 {
|
||||
margin: 0;
|
||||
}
|
||||
.title {
|
||||
padding: 0;
|
||||
}
|
||||
.aqi_box {
|
||||
width: 85%;
|
||||
top: 40%;
|
||||
text-align: center;
|
||||
}
|
||||
.aqi_box h1 {
|
||||
font-size: 5em;
|
||||
}
|
||||
.aqi_box h2 {
|
||||
font-size: 1.5em;
|
||||
}
|
||||
.weather_content {
|
||||
grid-template-columns: auto auto;
|
||||
grid-row-gap: 10px;
|
||||
}
|
||||
.weather_icon {
|
||||
padding: 5px;
|
||||
}
|
||||
.weather_icon img {
|
||||
width: 100%;
|
||||
}
|
||||
.weather_text {
|
||||
left: 35%;
|
||||
}
|
||||
.weather_text h3 {
|
||||
font-size: 1.2;
|
||||
}
|
||||
.desc_content {
|
||||
display: block;
|
||||
}
|
||||
.desc_item {
|
||||
padding: 0 10px;
|
||||
width: 50%;
|
||||
}
|
||||
.category_icon {
|
||||
padding: 0 20%;
|
||||
}
|
||||
.graph2 {
|
||||
display: block;
|
||||
}
|
||||
.graph3 {
|
||||
flex-direction: column-reverse;
|
||||
}
|
||||
}
|
|
@ -1,147 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<link rel="stylesheet" href="/css/style.css">
|
||||
<link rel="stylesheet" href="/css/lightbox.css">
|
||||
<title>Graphs</title>
|
||||
<script src="/js/aqi.js"></script>
|
||||
<script src="/js/lightbox.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="preload">
|
||||
<img src="/img/cloud_colors.gif" alt="cloud_animation">
|
||||
</div>
|
||||
<div class="block_wrap light_background">
|
||||
<?php include($_SERVER['DOCUMENT_ROOT'] . '/incl/topnav.php'); ?>
|
||||
</div>
|
||||
<div class="block_wrap">
|
||||
<div class="content">
|
||||
<h1>Graphs</h1>
|
||||
<p>All the graphs and table on this page will get recreated every night with the newest values.</p>
|
||||
</div>
|
||||
<div class="content">
|
||||
<h2 id="last3">Last three days</h2>
|
||||
</div>
|
||||
<div class="graph3 content">
|
||||
<div class="graph_item">
|
||||
<p>Three days ago</p>
|
||||
<a href="/dyn/day-3.png" data-lightbox="graph">
|
||||
<img src="/dyn/day-3.png" alt="day-3">
|
||||
</a>
|
||||
</div>
|
||||
<div class="graph_item">
|
||||
<p>Two days ago</p>
|
||||
<a href="/dyn/day-2.png" data-lightbox="graph">
|
||||
<img src="/dyn/day-2.png" alt="day-2">
|
||||
</a>
|
||||
</div>
|
||||
<div class="graph_item">
|
||||
<p>Yesterday</p>
|
||||
<a href="/dyn/day-1.png" data-lightbox="graph">
|
||||
<img src="/dyn/day-1.png" alt="day-1">
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
<div class="content divider">
|
||||
<hr class="col_border">
|
||||
</div>
|
||||
<div class="content">
|
||||
<h2 id="pm">Particle Matter sizes</h2>
|
||||
<p><b>There is no healthy level of pollution.</b> Particle matter (PM) are defined in two different sizes: PM 2.5 which represents particle sizes smaller than 2.5 µm or less than 1/20th of the diameter of a human hair and PM 10 which represents particle sizes smaller than 10 µm or 1/5th of the diameter of a human hair.</p>
|
||||
<p>The <a href="https://www.who.int/news-room/fact-sheets/detail/ambient-(outdoor)-air-quality-and-health" target="_blank">WHO</a> is providing more details on their website regarding particle matter and their health implications. On <a href="https://en.wikipedia.org/wiki/Particulates" target="blank">Wikipedia</a> there are some interesting links to studies for further reading.</p>
|
||||
</div>
|
||||
<div class="graph2 content">
|
||||
<div class="graph_item">
|
||||
<a href="/dyn/pm25.png" data-lightbox="pm-bar">
|
||||
<img src="/dyn/pm25.png" alt="pm 2.5 bar chart">
|
||||
</a>
|
||||
</div>
|
||||
<div>
|
||||
<h3>PM 2.5</h3>
|
||||
<p>Particle matter sizes smaller than 2.5µm are the most problematic as these particles will find their way through the lungs into the bloodstream.</p>
|
||||
<p>The WHO Air quality guideline values set a 25 µg/m³ 24-hour average as an upper level threshold. In the 10 days overview you can see:</p>
|
||||
<p>Green: Daily average exposure below 25 µg/m³<br>
|
||||
Red: Daily average exposure above 25 µg/m³</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="graph2 content">
|
||||
<div class="graph_item">
|
||||
<a href="/dyn/pm10.png" data-lightbox="pm-bar">
|
||||
<img src="/dyn/pm10.png" alt="pm 10 bar chart">
|
||||
</a>
|
||||
</div>
|
||||
<div>
|
||||
<h3>PM 10</h3>
|
||||
<p>The threshold for the daily average PM 10 exposure is set to 50 µg/m³ by the WHO. Particles this size can penetrate and lodge deep inside the lungs but are too big to enter the blood stream. For this reason the threshold is higher.</p>
|
||||
<p>In the 10 days overview you can see:</p>
|
||||
<p>Green: Daily average exposure below 50 µg/m³<br>
|
||||
Red: Daily average exposure above 50 µg/m³</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="content divider">
|
||||
<hr class="col_border">
|
||||
</div>
|
||||
<div class="content">
|
||||
<h2 id="hour">Hour by Hour</h2>
|
||||
</div>
|
||||
<div class="graph2 content">
|
||||
<div class="graph_item">
|
||||
<a href="/dyn/hours.png" data-lightbox="hours-bar">
|
||||
<img src="/dyn/hours.png" alt="hours 10 bar chart">
|
||||
</a>
|
||||
</div>
|
||||
<div>
|
||||
<h3>Hourly AQI average</h3>
|
||||
<p>The AQI value can change a lot during the day. This can depend on the wind, cooking on fire or just another additional source of pollution nearby.</p>
|
||||
<p>In this chart you can see the average AQI for each hour. This data is based on the last three days. This data can help to visualize which hours in the past three days have been better or worse on average.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="content divider">
|
||||
<hr class="col_border">
|
||||
</div>
|
||||
<div class="content">
|
||||
<h2>Compared to last year</h2>
|
||||
<p>This year's daily average AQI values from last 7 days compared to corresponding values from last year.</p>
|
||||
</div>
|
||||
<div class="graph2 content">
|
||||
<div class="year-table" id="compare">
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th></th>
|
||||
<th>this year</th>
|
||||
<th>last year</th>
|
||||
<th>change</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody class="aqi-table">
|
||||
<!-- start trs -->
|
||||
<?php
|
||||
$json_file = $_SERVER['DOCUMENT_ROOT'] . '/dyn/year-table.json';
|
||||
$json = json_decode(file_get_contents($json_file), true);
|
||||
$rows = $json['data'];
|
||||
foreach($rows as $row) {
|
||||
echo '<tr>';
|
||||
foreach($row as $cell) {
|
||||
echo '<td>' . $cell . '</td>';
|
||||
}
|
||||
echo '</tr>';
|
||||
}
|
||||
?>
|
||||
<!-- end trs -->
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div class="graph_item">
|
||||
<a href="/dyn/year-graph.png" data-lightbox="graph">
|
||||
<img src="/dyn/year-graph.png" alt="last year comparison">
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<?php include($_SERVER['DOCUMENT_ROOT'] . '/incl/footer.html'); ?>
|
||||
</body>
|
||||
</html>
|
|
@ -1,5 +0,0 @@
|
|||
<div class="col_bg footer_wrap">
|
||||
<div class="content footer">
|
||||
<p>© 2021 | <a href="https://github.com/bbilly1/aqi_monitor" target="_blank">Documentation</a></p>
|
||||
</div>
|
||||
</div>
|
|
@ -1,18 +0,0 @@
|
|||
<div class="col_bg colorbox" id="colorbox">
|
||||
<?php include($_SERVER['DOCUMENT_ROOT'] . '/incl/update.html'); ?>
|
||||
</div>
|
||||
<div class="top_content content">
|
||||
<div class="title">
|
||||
<h1>Live Air Quality</h1>
|
||||
<h2>in Luang Prabang Laos PDR</h2>
|
||||
<p>Last updated: <span id="timestamp"></span></p>
|
||||
</div>
|
||||
<div class="nav">
|
||||
<ul>
|
||||
<a href="/"><li class="col_border">Home</li></a>
|
||||
<a href="/about"><li class="col_border">About</li></a>
|
||||
<a href="/graphs"><li class="col_border">Graphs</li></a>
|
||||
<a href="/monthly"><li class="col_border">Monthly</li></a>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
|
@ -1,129 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<link rel="stylesheet" href="/css/style.css">
|
||||
<link rel="stylesheet" href="/css/lightbox.css">
|
||||
<title>AQI</title>
|
||||
<script src="/js/aqi.js"></script>
|
||||
<script src="/js/lightbox.js"></script>
|
||||
<meta property="og:title" content="Live Air Quality in Luang Prabang Laos PDR" />
|
||||
<meta property="og:url" content="https://www.lpb-air.com/" />
|
||||
<meta property="og:image" content="https://www.lpb-air.com/img/social_preview.jpg" />
|
||||
<meta property="og:type" content="website" />
|
||||
<meta property="og:description" content="Real-time and on-site air quality measurement and changes over time." />
|
||||
</head>
|
||||
<body>
|
||||
<div class="preload">
|
||||
<img src="/img/cloud_colors.gif" alt="cloud_animation">
|
||||
</div>
|
||||
<div class="block_wrap light_background">
|
||||
<?php include($_SERVER['DOCUMENT_ROOT'] . '/incl/topnav.php'); ?>
|
||||
<div class="top_content content">
|
||||
<div class="cloud">
|
||||
<img src="/img/cloud.png" alt="cloud" class="col_filter">
|
||||
<div class="aqi_box">
|
||||
<h1 id="aqiValue"></h1>
|
||||
<p id="aqi-label">US AQI</p>
|
||||
<h2 id="aqiCategory"></h2>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="block_wrap">
|
||||
<div class="weather_content content">
|
||||
<div class="weather_box col_border">
|
||||
<div class="weather_icon">
|
||||
<img src="/img/icon/000.png" alt="weather_icon" class="col_filter" id="weather_icon">
|
||||
</div>
|
||||
<div class="weather_text">
|
||||
<h3 class="col_font"><span id="temperature"></span><span> °C</span></h3>
|
||||
<p id="weather_name"></p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="weather_box col_border">
|
||||
<div class="weather_icon">
|
||||
<img src="/img/icon/wind.png" alt="wind_icon" class="col_filter">
|
||||
</div>
|
||||
<div class="weather_text">
|
||||
<h3 class="col_font">Wind</h3>
|
||||
<p><span id="wind_speed"></span><span> km/h</span></p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="weather_box col_border">
|
||||
<div class="weather_icon">
|
||||
<img src="/img/icon/humidity.png" alt="humidity_icon" class="col_filter">
|
||||
</div>
|
||||
<div class="weather_text">
|
||||
<h3 class="col_font">Humidity</h3>
|
||||
<p><span id="humidity"></span><span> %</span></p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="weather_box col_border">
|
||||
<div class="weather_icon">
|
||||
<img src="/img/icon/pressure.png" alt="pressure_icon" class="col_filter">
|
||||
</div>
|
||||
<div class="weather_text">
|
||||
<h3 class="col_font">Pressure</h3>
|
||||
<p><span id="pressure"></span><span> mbar</span></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="block_wrap light_background">
|
||||
<div class="desc_content content">
|
||||
<div class="desc_box">
|
||||
<div class="desc_item_wrap">
|
||||
<div class="desc_item good">
|
||||
<p>GOOD</p>
|
||||
</div>
|
||||
<div class="desc_item moderate">
|
||||
<p>MODERATE</p>
|
||||
</div>
|
||||
<div class="desc_item ufsg">
|
||||
<p>UNHEALTHY FOR SENSITIVE GROUPS</p>
|
||||
</div>
|
||||
<div class="desc_item unhealthy">
|
||||
<p>UNHEALTHY</p>
|
||||
</div>
|
||||
<div class="desc_item vunhealthy">
|
||||
<p>VERY UNHEALTHY</p>
|
||||
</div>
|
||||
<div class="desc_item hazardous">
|
||||
<p>HAZARDOUS</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="desc_box">
|
||||
<div class="category_icon">
|
||||
<img src="/img/icon/category-plain.png" alt="category_icon" id="categoryIcon">
|
||||
</div>
|
||||
</div>
|
||||
<div class="desc_box">
|
||||
<h2 class="col_font" id="aqiName"></h2>
|
||||
<h3 id="aqiRange"></h3>
|
||||
<p id="aqiDesc">Good: No health concerns, enjoy activities.</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="block_wrap">
|
||||
<div class="graph2 content">
|
||||
<div class="graph_item">
|
||||
<h3 id="last3">Last three hours</h3>
|
||||
<a href="/dyn/current.png" data-lightbox="graph" id="last3-a">
|
||||
<img src="/dyn/current.png" alt="current" id="last3-img">
|
||||
</a>
|
||||
</div>
|
||||
<div class="graph_item">
|
||||
<h3 id="last7">Last 7 days</h3>
|
||||
<a href="/dyn/last-7.png" data-lightbox="graph">
|
||||
<img src="/dyn/last-7.png" alt="last-7 days">
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<?php include($_SERVER['DOCUMENT_ROOT'] . '/incl/footer.html'); ?>
|
||||
</body>
|
||||
</html>
|
|
@ -1,56 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<link rel="stylesheet" href="/css/style.css">
|
||||
<link rel="stylesheet" href="/css/lightbox.css">
|
||||
<title>Monthly</title>
|
||||
<script src="/js/aqi.js"></script>
|
||||
<script src="/js/lightbox.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="preload">
|
||||
<img src="/img/cloud_colors.gif" alt="cloud_animation">
|
||||
</div>
|
||||
<div class="block_wrap light_background">
|
||||
<?php include($_SERVER['DOCUMENT_ROOT'] . '/incl/topnav.php'); ?>
|
||||
</div>
|
||||
<div class="block_wrap">
|
||||
<div class="content">
|
||||
<h1>Month by month</h1>
|
||||
<p>Month compared to last year. Values are in 8h average.</p>
|
||||
</div>
|
||||
<!-- list start -->
|
||||
<?php
// One section per archived month, newest first. Each section pairs the
// pre-rendered graph png with the stats table from its sibling json file.
$doc_root = $_SERVER['DOCUMENT_ROOT'];
$month_files = array_reverse(glob($doc_root . '/dyn/monthly/*.png'));
foreach ($month_files as $month_png) {
    $month_name = basename($month_png, ".png");
    $table_json = json_decode(
        file_get_contents($doc_root . '/dyn/monthly/' . $month_name . '.json'),
        true
    );
    $month_date = new DateTime($month_name);
    $heading = $month_date->format('F Y');
    echo '<div class="content"><h2>' . $heading . '</h2></div>';
    echo '<div class="graph2 content">';
    echo '<div class="graph_item"><a href="/dyn/monthly/' . $month_name . '.png" data-lightbox="monthly">';
    echo '<img src="/dyn/monthly/' . $month_name . '.png" alt="' . $month_name . '"></a></div>';
    echo '<div class="year-table"><table>';
    echo '<thead><tr><th></th><th>this year</th><th>last year</th><th>change</th></tr></thead>';
    echo '<tbody class="aqi-table">';
    foreach ($table_json['data'] as $table_row) {
        echo '<tr>';
        foreach ($table_row as $table_cell) {
            echo '<td>' . $table_cell . '</td>';
        }
        echo '</tr>';
    }
    echo '</tbody>';
    echo '</table></div>';
    echo '</div>';
}
?>
|
||||
<!-- list end -->
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
|
@ -5,4 +5,4 @@ FROM nginx
|
|||
RUN rm /etc/nginx/conf.d/default.conf
|
||||
|
||||
# Replace with our own nginx.conf
|
||||
COPY nginx.conf /etc/nginx/conf.d/
|
||||
COPY nginx.conf /etc/nginx/conf.d/
|
|
@ -0,0 +1,19 @@
|
|||
FROM python

WORKDIR /srv/flask_app

# build tools needed by pip for source packages
RUN apt-get clean && apt-get -y update

RUN apt-get -y install python3-dev \
    && apt-get -y install build-essential

# install requirements before copying the code so the pip layer is
# cached and only rebuilt when requirements.txt itself changes
COPY requirements.txt /srv/flask_app/requirements.txt
RUN pip install -r requirements.txt --src /usr/local/src

# now add the application code
COPY . /srv/flask_app

RUN mkdir -p static/dyn/monthly
# NOTE(review): 777 is overly permissive; consider chown to the app user
RUN chmod 777 -R .

RUN chmod +x ./start.sh

CMD ["./start.sh"]
|
|
@ -0,0 +1,17 @@
|
|||
{
|
||||
"aqi_monitor": {
|
||||
"authUsername": "user",
|
||||
"authPassword": "password"
|
||||
},
|
||||
"openweathermap": {
|
||||
"api_key": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
|
||||
"lat": 40.71,
|
||||
"lon": -74.00
|
||||
},
|
||||
"postgres": {
|
||||
"db_host": "postgres",
|
||||
"db_database": "aqi",
|
||||
"db_user": "aqi",
|
||||
"db_password": "xxxxxxxxxxxxxxxxxxxx"
|
||||
}
|
||||
}
|
|
@ -1,6 +1,7 @@
|
|||
apscheduler
|
||||
APScheduler
|
||||
Flask_HTTPAuth
|
||||
Flask_Table
|
||||
Flask
|
||||
Flask-HTTPAuth
|
||||
matplotlib
|
||||
numpy
|
||||
pandas
|
|
@ -0,0 +1,245 @@
|
|||
""" handle db connections """
|
||||
|
||||
import json
|
||||
from datetime import datetime
|
||||
|
||||
import psycopg2
|
||||
import requests
|
||||
|
||||
from src.helper import get_config
|
||||
|
||||
|
||||
class DatabaseConnect:
    """ thin wrapper around a psycopg2 connection and cursor """

    CONFIG = get_config()

    def __init__(self):
        self.conn, self.cur = self.db_connect()

    def db_connect(self):
        """ open a connection and a cursor from the postgres config """
        pg_config = self.CONFIG['postgres']
        connection = psycopg2.connect(
            host=pg_config['db_host'],
            database=pg_config['db_database'],
            user=pg_config['db_user'],
            password=pg_config['db_password']
        )
        # cursor to perform database operations
        cursor = connection.cursor()
        return connection, cursor

    def db_execute(self, query):
        """ run a query: a plain string select returns all rows,
        a (sql, values) tuple is executed parameterized and returns False """
        if isinstance(query, str):
            self.cur.execute(query)
            result = self.cur.fetchall()
        elif isinstance(query, tuple):
            self.cur.execute(query[0], query[1])
            result = False

        return result

    def db_close(self):
        """ commit pending work, then close cursor and connection """
        self.conn.commit()
        self.cur.close()
        self.conn.close()
|
||||
|
||||
|
||||
class IngestLine:
    """ enrich one sensor reading posted by the monitor with aqi value,
    timestamp and current weather, then build the two insert queries
    (self.aqi_query, self.weather_query) for psycopg2 """

    def __init__(self, data):
        self.aqi_query = None
        self.weather_query = None
        self.input_json = data
        self.add_aqi()
        self.add_timestamp()
        self.add_weather()
        self.add_query()

    def add_aqi(self):
        """ add aqi_value and aqi_category keys from pm2.5 value """
        # US EPA breakpoints: (category, pm low, pm high, aqi low, aqi high)
        aqi_breakpoints = [
            ('Good', 0, 12.0, 0, 50),
            ('Moderate', 12.1, 35.4, 51, 100),
            ('Unhealthy for Sensitive Groups', 35.5, 55.4, 101, 150),
            ('Unhealthy', 55.5, 150.4, 151, 200),
            ('Very Unhealthy', 150.5, 250.4, 201, 300),
            ('Hazardous', 250.5, 500.4, 301, 500),
        ]

        # clamp into the table's range and round to its one decimal
        # resolution so every reading lands in exactly one band; the old
        # strict bounds let 0, exact breakpoints and >500.4 fall through
        # the loop and interpolate against the wrong (last) band
        pm25 = round(min(max(self.input_json['pm25'], 0), 500.4), 1)
        for break_point in aqi_breakpoints:
            aqi_category, p_low, p_high, a_low, a_high = break_point
            if p_low <= pm25 <= p_high:
                # found it
                break

        # linear interpolation within the matched band
        aqi = (a_high - a_low) / (p_high - p_low) * (pm25 - p_low) + a_low

        aqi_dict = {
            'aqi_value': round(aqi),
            'aqi_category': aqi_category
        }

        self.input_json.update(aqi_dict)

    def add_timestamp(self):
        """ add time_stamp and epoch_time keys to dict """
        now = datetime.now()
        timestamp = now.strftime("%Y-%m-%d %H:%M:%S")
        # int(timestamp()) is portable, strftime('%s') is glibc-only
        epoch_time = int(now.timestamp())

        time_stamp_dict = {
            'time_stamp': timestamp,
            'epoch_time': epoch_time
        }

        self.input_json.update(time_stamp_dict)

    def add_weather(self):
        """ merge current weather data into the reading """
        weather = Weather().last_weather
        self.input_json.update(weather)

    def add_query(self):
        """ set aqi and weather insert queries on self """
        aqi_keys = (
            'epoch_time', 'sensor_id', 'time_stamp', 'uptime',
            'pm25', 'pm10', 'aqi_value', 'aqi_category'
        )
        aqi_query = self.build_query(aqi_keys, 'aqi')
        weather_keys = (
            'epoch_time', 'sensor_id', 'time_stamp', 'temperature',
            'pressure', 'humidity', 'wind_speed', 'wind_direction',
            'weather_name', 'weather_icon'
        )
        weather_query = self.build_query(weather_keys, 'weather')
        self.aqi_query = aqi_query
        self.weather_query = weather_query

    def build_query(self, keys, table):
        """ stitch a parameterized (sql, values) tuple for psycopg2 """
        keys_str = ', '.join(keys)
        valid = ', '.join(['%s' for i in keys])
        values = tuple(self.input_json[i] for i in keys)

        query = (f'INSERT INTO {table} ({keys_str}) VALUES ({valid});', values)

        return query
|
||||
|
||||
|
||||
class Weather:
    """ handle weather lookup from the openweathermap API, cached on
    disk and refreshed when the cache is older than ten minutes """

    CONFIG = get_config()
    # cache file shared between runs
    CACHE_FILE = 'static/dyn/weather.json'

    def __init__(self):
        now = datetime.now()
        # int(timestamp()) is portable, strftime('%s') is glibc-only
        self.epoch_time = int(now.timestamp())
        self.last_weather = self.get_weather()

    def get_weather(self):
        """ get weather from disk, or from the api if the cache is stale """
        try:
            last_dict = self.get_cache()
        except FileNotFoundError:
            # create for first time
            last_dict = self.get_openweather()
        last_epoch = last_dict['epoch_time']

        if self.epoch_time - last_epoch > 10 * 60:
            print('get new weather data')
            weather = self.get_openweather()
        else:
            print('reuse weather data')
            weather = last_dict

        # epoch_time is cache bookkeeping, not part of the weather payload
        del weather['epoch_time']

        return weather

    def get_openweather(self):
        """ get missing weatherdata from openweathermap api and cache it """
        api_key = self.CONFIG['openweathermap']['api_key']
        lat = self.CONFIG['openweathermap']['lat']
        lon = self.CONFIG['openweathermap']['lon']

        url = ('https://api.openweathermap.org/data/2.5/weather' +
               f'?&units=metric&appid={api_key}&lat={lat}&lon={lon}')
        resp = requests.get(url, timeout=20).json()
        weather = {
            'weather_name': resp['weather'][0]['main'],
            'weather_icon': resp['weather'][0]['icon'],
            'wind_speed': resp['wind']['speed'],
            'wind_direction': resp['wind']['deg'],
            'epoch_time': self.epoch_time
        }
        self.write_cache(weather)

        return weather

    @classmethod
    def get_cache(cls):
        """ read the last stored weather dict from disk """
        with open(cls.CACHE_FILE, 'r') as f:
            return json.load(f)

    @classmethod
    def write_cache(cls, weather):
        """ overwrite the cached weather dict on disk """
        with open(cls.CACHE_FILE, 'w') as f:
            json.dump(weather, f)
|
||||
|
||||
|
||||
def get_current():
    """ get last values from db """

    db_handler = DatabaseConnect()
    aqi = db_handler.db_execute(
        'SELECT time_stamp, aqi_value, aqi_category \
        FROM aqi ORDER BY epoch_time DESC LIMIT 1;'
    )
    weather = db_handler.db_execute(
        'SELECT temperature, pressure, humidity, \
        wind_speed, weather_name, weather_icon \
        FROM weather ORDER BY epoch_time DESC LIMIT 1;'
    )
    db_handler.db_close()

    # unpack the newest row of each table
    temperature, pressure, humidity, wind_speed, \
        weather_name, weather_icon = weather[0]
    time_stamp, aqi_value, aqi_category = aqi[0]

    # key order is preserved in the serialized json
    json_dict = {
        "temperature": temperature,
        "pressure": pressure,
        "humidity": humidity,
        "weather_name": weather_name,
        "weather_icon": weather_icon,
        "timestamp": time_stamp,
        "aqi_value": aqi_value,
        "aqi_category": aqi_category,
        "wind_speed": wind_speed
    }
    return json.dumps(json_dict)
|
||||
|
||||
|
||||
def insert_data(data):
    """ called from ingest route to make the db insert """

    # enrich the raw reading and build both insert queries
    ingest = IngestLine(data)

    # run both inserts on one connection
    db_handler = DatabaseConnect()
    for query in (ingest.aqi_query, ingest.weather_query):
        db_handler.db_execute(query)
    db_handler.db_close()
|
|
@ -0,0 +1,114 @@
|
|||
""" handle current graph export """
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
from matplotlib import pyplot as plt
|
||||
|
||||
from src.db import DatabaseConnect
|
||||
from src.helper import get_config, plt_fill
|
||||
|
||||
|
||||
class CurrentPlot:
    """ recreate the last 3h plot from the aqi table """

    CONFIG = get_config()
    FILENAME = 'static/dyn/current.png'

    def __init__(self):
        self.now = datetime.now()
        self.rows = self.get_data()
        # set by build_axis()
        self.axis = None

    def get_data(self):
        """ export last three hours of (epoch_time, aqi_value) from postgres """
        # int(timestamp()) is portable, strftime('%s') is glibc-only
        now_epoch = int(self.now.timestamp())
        last_3h = now_epoch - 3 * 60 * 60

        query = ('SELECT epoch_time, aqi_value FROM aqi '
                 f'WHERE epoch_time > {last_3h} ORDER BY epoch_time DESC;')

        db_handler = DatabaseConnect()
        rows = db_handler.db_execute(query)
        db_handler.db_close()

        return rows

    def build_title(self):
        """ build plot title from oldest and newest timestamp """
        # rows are ordered DESC: last element is oldest, first is newest
        time_from = datetime.fromtimestamp(self.rows[-1][0]).strftime('%H:%M')
        time_until = datetime.fromtimestamp(self.rows[0][0]).strftime('%H:%M')
        plt_title = f'AQI values last 3h: {time_from} - {time_until}'

        return plt_title

    def build_axis(self):
        """ resample rows to 3 minute means and set self.axis """
        rows = self.rows
        x_timeline = [datetime.fromtimestamp(i[0]) for i in rows]
        y_aqi_values = [int(i[1]) for i in rows]
        data = {'timestamp': x_timeline, 'aqi': y_aqi_values}
        df = pd.DataFrame(data)

        indexed = df.set_index('timestamp')
        indexed.sort_values(by=['timestamp'], inplace=True)
        mean = indexed.resample('3min').mean()
        # bridge single missing samples, but don't extrapolate at the edges
        mean.interpolate(
            method='linear', limit=1, inplace=True, limit_area='inside'
        )
        mean.reset_index(level=0, inplace=True)
        mean['timestamp'] = mean['timestamp'].dt.strftime('%H:%M')
        mean['aqi'] = mean['aqi'].round()
        plt_title = self.build_title()
        # tick positions on every quarter hour
        x_ticks = []
        for num, i in enumerate(mean['timestamp']):
            minute = int(i.split(':')[1])
            if minute % 15 == 0:
                x_ticks.append(num)

        axis = {
            'x': mean['timestamp'],
            'y': mean['aqi'],
            'x_ticks': x_ticks,
            'plt_title': plt_title
        }
        self.axis = axis

    def write_plt(self):
        """ save plot built from self.axis to FILENAME """
        x = self.axis['x']
        y = self.axis['y']
        x_ticks = self.axis['x_ticks']
        # round y max up to the next 50 and add headroom
        y_max = np.ceil(y.max()/50)*50 + 50
        # setup plot
        plt.style.use('seaborn')
        plt.plot(x, y, color='#313131',)
        # fill colors
        plt_fill(plt, x, y)
        # handle passing ticks and labels separately
        if len(x_ticks) == 2:
            plt.xticks(x_ticks[0], x_ticks[1])
        else:
            plt.xticks(x_ticks)
        plt.yticks(np.arange(0, y_max, step=50))
        plt.title(self.axis['plt_title'], fontsize=20)
        plt.tight_layout()
        plt.savefig(self.FILENAME, dpi=300)
        plt.figure()
        plt.close('all')
|
||||
|
||||
|
||||
def main():
    """ main function to export current plot """
    print('current graph export')
    current = CurrentPlot()
    # guard clause: nothing recorded in the last three hours
    if not current.rows:
        print('no rows found to export current graph')
        return
    current.build_axis()
    current.write_plt()
|
|
@ -0,0 +1,219 @@
|
|||
""" handle all monthly tasks """
|
||||
|
||||
import json
|
||||
from os import path
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
from matplotlib import pyplot as plt
|
||||
|
||||
from src.db import DatabaseConnect
|
||||
from src.helper import plt_fill
|
||||
|
||||
|
||||
class MonthStatus:
    """ figure out whether last month's graph still needs to be created """

    def __init__(self):
        self.m_stamp, self.y_stamp = (None, None)
        self.get_epoch()
        self.found = self.check_needed()

    def get_epoch(self):
        """ build (start, end) epoch tuples for last month and for the
        same month one year earlier """
        now = datetime.now()
        # last month: first second until last second
        m_end = datetime(now.year, now.month, day=1) - timedelta(seconds=1)
        m_start = datetime(m_end.year, m_end.month, day=1)
        # same month last year; 29 Feb has no counterpart, fall back to 28
        try:
            y_now = now.replace(year=now.year - 1)
        except ValueError:
            y_now = now.replace(year=now.year - 1, day=28)
        y_end = datetime(y_now.year, y_now.month, day=1) - timedelta(seconds=1)
        y_start = datetime(y_end.year, y_end.month, day=1)
        # int(timestamp()) is portable, strftime('%s') is glibc-only
        self.m_stamp = (int(m_start.timestamp()), int(m_end.timestamp()))
        self.y_stamp = (int(y_start.timestamp()), int(y_end.timestamp()))

    def check_needed(self):
        """ True if last month's png already exists on disk """
        file_name = datetime.fromtimestamp(self.m_stamp[0]).strftime('%Y-%m')
        file_path = path.join('static/dyn/monthly', file_name + '.png')
        return path.isfile(file_path)


class MonthGenerator(MonthStatus):
    """ create the monthly graph png and the json stats table """

    def __init__(self):
        super().__init__()
        self.m_rows, self.y_rows = self.get_data()
        self.axis = self.build_axis()

    def get_data(self):
        """ export aqi rows for both months from postgres """
        m_query = ('SELECT epoch_time, aqi_value FROM aqi WHERE '
                   f'epoch_time > {self.m_stamp[0]} AND '
                   f'epoch_time < {self.m_stamp[1]} '
                   'ORDER BY epoch_time DESC;')
        y_query = ('SELECT epoch_time, aqi_value FROM aqi WHERE '
                   f'epoch_time > {self.y_stamp[0]} AND '
                   f'epoch_time < {self.y_stamp[1]} '
                   'ORDER BY epoch_time DESC;')
        # make the call
        db_handler = DatabaseConnect()
        m_rows = db_handler.db_execute(m_query)
        y_rows = db_handler.db_execute(y_query)
        db_handler.db_close()
        return m_rows, y_rows

    def build_axis(self):
        """ resample both months to 8h means and merge them on a shared
        day-plus-time index so both years line up """
        # this year's df
        x_timeline = [datetime.fromtimestamp(i[0]) for i in self.m_rows]
        y_aqi_values = [int(i[1]) for i in self.m_rows]
        data = {'timestamp': x_timeline, 'now_aqi': y_aqi_values}
        df = pd.DataFrame(data)
        indexed = df.set_index('timestamp')
        indexed.sort_values(by=['timestamp'], inplace=True)
        mean = indexed.resample('8h').mean().round()
        # reindex on day + time, dropping the year
        mean.reset_index(level=0, inplace=True)
        mean['timestamp'] = mean['timestamp'].dt.strftime('%d %H:%M')
        mean.set_index('timestamp', inplace=True)
        # second df with last year data
        x_timeline = [datetime.fromtimestamp(i[0]) for i in self.y_rows]
        y_aqi_values = [int(i[1]) for i in self.y_rows]
        data = {'timestamp': x_timeline, 'year_aqi': y_aqi_values}
        df = pd.DataFrame(data)
        indexed = df.set_index('timestamp')
        indexed.sort_values(by=['timestamp'], inplace=True)
        y_mean = indexed.resample('8h').mean().round()
        y_mean.reset_index(level=0, inplace=True)
        y_mean['timestamp'] = y_mean['timestamp'].dt.strftime('%d %H:%M')
        y_mean.set_index('timestamp', inplace=True)
        # merge the two on the shared index
        mean['year_aqi'] = y_mean['year_aqi']
        mean.reset_index(level=0, inplace=True)
        mean.sort_values(by='timestamp', ascending=True, inplace=True)
        # return axis
        axis = {
            'x': mean['timestamp'],
            'y_1': mean['now_aqi'],
            'y_2': mean['year_aqi']
        }
        return axis

    def write_plt(self):
        """ write monthly plot png to static/dyn/monthly """
        x = self.axis['x']
        y_1 = self.axis['y_1']
        y_2 = self.axis['y_2']
        # parse timestamp
        date_month = datetime.fromtimestamp(self.m_rows[-1][0]).date()
        date_title = date_month.strftime('%b %Y')
        date_file = date_month.strftime('%Y-%m')
        month_short = date_month.strftime('%b')
        file_name = 'static/dyn/monthly/' + date_file + '.png'
        # build ticks; pd.concat replaces Series.append (removed in pandas 2)
        y_max = np.ceil(pd.concat([y_1, y_2]).max() / 50) * 50 + 50
        x_range = np.arange(0, len(x), step=9)
        last_day = int(x.max().split()[0])
        x_numbers = np.arange(1, last_day + 1, step=3)
        x_dates = [f'{str(i).zfill(2)} {month_short}' for i in x_numbers]
        x_ticks = x_range, x_dates
        # plot
        plt.style.use('seaborn')
        plt.plot(x, y_1, color='#313131', label='this year')
        plt.plot(
            x, y_2, color='#666666', linestyle='dashed', label='last year'
        )
        # fill colors
        plt_fill(plt, x, y_1)
        plt.xticks(x_ticks[0], x_ticks[1])
        plt.yticks(np.arange(0, y_max, step=50))
        plt.title(f'AQI values for: {date_title}', fontsize=20)
        plt.legend()
        plt.tight_layout()
        plt.savefig(file_name, dpi=300)
        plt.figure()
        # release figure memory between scheduler runs
        plt.close('all')

    @staticmethod
    def get_aqi(val):
        """ map an aqi value to its category name """
        # (category, inclusive upper bound), ascending
        breakpoints = [
            ('Good', 50),
            ('Moderate', 100),
            ('Unhealthy for Sensitive Groups', 150),
            ('Unhealthy', 200),
            ('Very Unhealthy', 300),
            ('Hazardous', 500),
        ]

        for category, max_val in breakpoints:
            # inclusive bound: 0 now maps to 'Good' instead of
            # falling through the old exclusive-lower-bound loop
            if val <= max_val:
                return category

        # above the table
        return 'Hazardous'

    @staticmethod
    def get_change(m_val, y_val):
        """ classify year over year change on a 15 percent threshold """
        if m_val == 0:
            # avoid division by zero when this year has no data
            return 'same' if y_val == 0 else 'down'
        diff_avg = (m_val - y_val) / m_val
        if diff_avg <= -0.15:
            return 'down'
        if diff_avg >= 0.15:
            return 'up'
        return 'same'

    def write_table(self):
        """ write json file with monthly min/max/avg details """
        date_month = datetime.fromtimestamp(self.m_rows[-1][0]).date()
        date_file = date_month.strftime('%Y-%m')
        file_name = 'static/dyn/monthly/' + date_file + '.json'
        # current
        m_min = int(self.axis['y_1'].min())
        m_max = int(self.axis['y_1'].max())
        m_avg = int(self.axis['y_1'].mean())
        m_cat = self.get_aqi(m_avg)
        # last
        y_min = int(self.axis['y_2'].min())
        y_max = int(self.axis['y_2'].max())
        y_avg = int(self.axis['y_2'].mean())
        y_cat = self.get_aqi(y_avg)
        # build dict
        monthly_dict = {
            'data': [
                ['min: ', m_min, y_min, self.get_change(m_min, y_min)],
                ['max: ', m_max, y_max, self.get_change(m_max, y_max)],
                ['avg: ', m_avg, y_avg, self.get_change(m_avg, y_avg)],
                ['avg aqi: ', m_cat, y_cat, self.get_change(m_avg, y_avg)]
            ]
        }
        # write to disk
        with open(file_name, 'w') as f:
            json.dump(monthly_dict, f)
|
||||
|
||||
|
||||
def main():
    """ main to export monthly graph and table json """
    # skip everything if this month was already rendered
    month_status = MonthStatus()
    if month_status.found:
        print('monthly already created, skipping...')
        return

    print('creating monthly graph and json file')
    month_generator = MonthGenerator()
    month_generator.write_plt()
    month_generator.write_table()
|
|
@ -0,0 +1,483 @@
|
|||
""" handle nightly graph export """
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
import json
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
import scipy # pylint: disable=unused-import
|
||||
|
||||
from matplotlib import pyplot as plt
|
||||
|
||||
from src.db import DatabaseConnect
|
||||
from src.helper import get_config, plt_fill
|
||||
|
||||
|
||||
class NightlyPlots:
    """ fetch the last 10 days of readings once and fan out to the
    individual nightly graph exporters """

    CONFIG = get_config()

    def __init__(self):
        self.now = datetime.now()
        print('get data from db')
        self.rows, self.y_rows = self.get_data()

    def _midnight_epoch(self):
        """ epoch seconds at today's local midnight;
        int(timestamp()) is portable where strftime('%s') is glibc-only """
        midnight = datetime.combine(self.now.date(), datetime.min.time())
        return int(midnight.timestamp())

    @staticmethod
    def color_colums(y):
        """ map every aqi value in y to its category color,
        always returning a list the same length as y """
        # (color, inclusive upper bound) per category, ascending
        breakpoints = [
            ('#85a762', 50),   # good
            ('#d4b93c', 100),  # moderate
            ('#e96843', 150),  # ufsg
            ('#d03f3b', 200),  # unhealthy
            ('#be4173', 300),  # vunhealthy
            ('#714261', 500),  # hazardous
        ]

        colors = []
        for value in y:
            for color, max_val in breakpoints:
                # inclusive bound: values of 0 or below get the 'good'
                # color instead of silently skipping the column, which
                # previously misaligned colors against the bars
                if value <= max_val:
                    colors.append(color)
                    break
            else:
                # above the table: hazardous
                colors.append('#714261')

        return colors

    def get_data(self):
        """ export last 10 days and the same window last year from postgres """
        # current
        day_until = self._midnight_epoch()
        day_from = day_until - 10 * 24 * 60 * 60
        query = ('SELECT epoch_time, aqi_value, pm25, pm10 FROM aqi WHERE '
                 f'epoch_time > {day_from} AND epoch_time < {day_until} '
                 'ORDER BY epoch_time DESC;')
        # last year
        y_until = day_until - 365 * 24 * 60 * 60
        y_from = y_until - 10 * 24 * 60 * 60
        y_query = ('SELECT epoch_time, aqi_value FROM aqi WHERE '
                   f'epoch_time > {y_from} AND epoch_time < {y_until} '
                   'ORDER BY epoch_time DESC;')
        db_handler = DatabaseConnect()
        rows = db_handler.db_execute(query)
        y_rows = db_handler.db_execute(y_query)
        db_handler.db_close()

        return rows, y_rows

    def recreate_last_7(self):
        """ last seven days """
        day_until = self._midnight_epoch()
        day_from = day_until - 7 * 24 * 60 * 60
        rows = [i for i in self.rows if day_from < i[0] < day_until]
        date_from = datetime.fromtimestamp(day_from).strftime('%d %b')
        date_until = datetime.fromtimestamp(day_until).strftime('%d %b')
        plt_title = f'AQI values from: {date_from} until {date_until}'
        _ = LastSevenDays(rows, plt_title)

    def recreate_last_3(self):
        """ last three days """
        _ = LastThreeDays(self.rows, self.now)

    def recreate_pm_chart(self):
        """ recreating pm2.5 and pm10 charts """
        _ = PmGraphs(self.rows)

    def recreate_hour_bar(self):
        """ recreate hourly average through day bar chart """
        day_until = self._midnight_epoch()
        day_from = day_until - 3 * 24 * 60 * 60
        rows = [i for i in self.rows if day_from < i[0] < day_until]
        _ = HourBar(rows)

    def recreate_year_comparison(self):
        """ recreate year comparison chart and table for json """
        _ = YearComparison(self.rows, self.y_rows)
|
||||
|
||||
|
||||
class LastSevenDays:
    """Build and write the last-seven-days AQI line plot."""

    # output path for the rendered chart
    FILENAME = 'static/dyn/last-7.png'

    def __init__(self, rows, plt_title):
        """rows: (epoch_time, aqi_value) tuples; plt_title: chart title."""
        print('recreating last seven days')
        self.plt_title = plt_title
        self.rows = rows
        self.axis = self.build_axis()
        self.write_plt()

    def build_axis(self):
        """Resample rows into 2h averages plus a smoothed daily average."""
        x_timeline = [datetime.fromtimestamp(i[0]) for i in self.rows]
        y_aqi_values = [int(i[1]) for i in self.rows]
        df = pd.DataFrame({'timestamp': x_timeline, 'aqi': y_aqi_values})
        indexed = df.set_index('timestamp')
        indexed.sort_values(by=['timestamp'], inplace=True)
        mean = indexed.resample('2h').mean()
        mean['avg'] = mean['aqi'].resample('1d').mean()
        # center each daily average within its day (12h = 6 x 2h bins)
        mean['avg'] = mean.avg.shift(6)
        # anchor first and last points so interpolation spans the full
        # range; use .loc/.iloc instead of chained assignment, which no
        # longer updates the frame in modern pandas
        mean.loc[mean.index[0], 'avg'] = (
            mean['avg'].iloc[6] + mean['aqi'].iloc[0]) / 2
        mean.loc[mean.index[-1], 'avg'] = (
            mean['avg'].iloc[-6] + mean['aqi'].iloc[-1]) / 2
        # smooth the daily average curve
        mean['avg'].interpolate(method='polynomial', order=3, inplace=True)
        mean.reset_index(level=0, inplace=True)
        mean['timestamp'] = mean['timestamp'].dt.strftime('%Y-%m-%d %H:%M')
        mean['aqi'] = mean['aqi'].round()
        mean['avg'] = mean['avg'].round()
        # x ticks: one label per day, 12 x 2h bins apart (7 days = 84 bins)
        x_range = np.arange(0, 84, step=12)
        x_date_time = pd.to_datetime(mean['timestamp']).dt.date.unique()
        x_dates = np.asarray([i.strftime('%d %b') for i in x_date_time])
        x_ticks = x_range, x_dates
        axis = {
            "x": mean['timestamp'],
            "y_1": mean['aqi'],
            "y_2": mean['avg'],
            "x_ticks": x_ticks,
            "plt_title": self.plt_title
        }
        return axis

    def write_plt(self):
        """Render the plot and save it to FILENAME."""
        x = self.axis['x']
        y_1 = self.axis['y_1']
        y_2 = self.axis['y_2']
        x_ticks = self.axis['x_ticks']
        # Series.append was removed in pandas 2.0 -> use pd.concat
        y_max = np.ceil(pd.concat([y_1, y_2]).max() / 50) * 50 + 50
        # plot
        plt.style.use('seaborn')
        plt.plot(x, y_1, color='#313131', label='2hour avg')
        plt.plot(x, y_2, color='#cc0000', label='daily avg')
        # fill AQI category colors under the curve
        plt_fill(plt, x, y_1)
        # ticks and plot
        plt.xticks(x_ticks[0], x_ticks[1])
        plt.yticks(np.arange(0, y_max, step=50))
        plt.title(self.axis['plt_title'], fontsize=20)
        plt.legend()
        plt.tight_layout()
        plt.savefig(self.FILENAME, dpi=300)
        plt.figure()
|
||||
|
||||
|
||||
class LastThreeDays:
    """Build and write the three single-day AQI plots (day-1..day-3)."""

    def __init__(self, rows, now):
        """rows: (epoch_time, aqi_value) tuples; now: current datetime."""
        print('recreating last three days')
        self.y_max = None      # shared y limit across all three plots
        self.now = now
        self.rows = rows
        self.rebuild_last_three()

    def rebuild_last_three(self):
        """Recreate all three daily graphs with a common y scale."""
        all_axis = [self.get_axis(day) for day in range(1, 4)]
        # common upper limit so the three charts are visually comparable
        self.y_max = max(max(i['y']) for i in all_axis) + 50
        for idx, axis in enumerate(all_axis):
            self.write_plt(axis, idx + 1)

    def get_axis(self, day):
        """Build x/y axis data for the day `day` days before now."""
        day_delta = self.now.date() - timedelta(days=day)
        # midnight of that day as epoch seconds; datetime.timestamp() is
        # portable, unlike the glibc-only strftime('%s') extension
        day_from = int(datetime(
            day_delta.year, day_delta.month, day_delta.day).timestamp())
        day_until = day_from + 60 * 60 * 24
        day_rows = [i for i in self.rows if day_from < i[0] < day_until]
        # title
        time_stamp = day_delta.strftime('%Y-%m-%d')
        # build 15min resampled series
        x_timeline = [datetime.fromtimestamp(i[0]) for i in day_rows]
        y_aqi_values = [int(i[1]) for i in day_rows]
        df = pd.DataFrame({'timestamp': x_timeline, 'aqi': y_aqi_values})
        indexed = df.set_index('timestamp')
        indexed.sort_values(by=['timestamp'], inplace=True)
        mean = indexed.resample('15min').mean()
        # bridge single missing samples, but only inside the data range
        mean.interpolate(
            method='linear', limit=1, inplace=True, limit_area='inside'
        )
        mean.reset_index(level=0, inplace=True)
        mean['timestamp'] = mean['timestamp'].dt.strftime('%H:%M')
        mean['aqi'] = mean['aqi'].round()
        axis = {
            "x": mean['timestamp'],
            "y": mean['aqi'],
            "x_ticks": np.arange(0, 97, step=8),  # every 2h of 15min bins
            "plt_title": f'AQI values from: {time_stamp}'
        }
        return axis

    def write_plt(self, axis, day):
        """Render one daily plot and save it as day-<n>.png."""
        x = axis['x']
        y = axis['y']
        x_ticks = np.arange(0, 97, step=8)
        plt.style.use('seaborn')
        plt.plot(x, y, color='#313131',)
        # fill AQI category colors under the curve
        plt_fill(plt, x, y)
        # ticks and plot
        plt.xticks(x_ticks)
        plt.yticks(np.arange(0, self.y_max, step=50))
        plt.title(axis['plt_title'], fontsize=20)
        plt.tight_layout()
        plt.savefig(f'static/dyn/day-{day}.png', dpi=300)
        plt.figure()
        plt.close('all')
|
||||
|
||||
|
||||
class PmGraphs:
    """Build and write daily-average PM2.5 and PM10 bar charts."""

    def __init__(self, rows):
        """rows: (epoch_time, aqi_value, pm25, pm10) tuples."""
        print('recreating pm bar charts')
        self.rows = rows
        self.y_max = None      # set as a side effect of get_axis
        self.axis = self.get_axis()
        # threshold lines drawn per chart (daily guideline values)
        self.write_plt(thresh=25, title='2.5')
        self.write_plt(thresh=50, title='10')

    def get_axis(self):
        """Resample rows into daily PM2.5/PM10 means; sets self.y_max."""
        x_timeline = [datetime.fromtimestamp(i[0]) for i in self.rows]
        y_pm25_values = [int(i[2]) for i in self.rows]
        y_pm10_values = [int(i[3]) for i in self.rows]
        df = pd.DataFrame({
            'timestamp': x_timeline,
            'pm25': y_pm25_values,
            'pm10': y_pm10_values
        })
        indexed = df.set_index('timestamp')
        indexed.sort_values(by=['timestamp'], inplace=True, ascending=True)
        mean = indexed.resample('1d').mean()
        mean.reset_index(level=0, inplace=True)
        axis = {
            'x': mean['timestamp'],
            'y_pm25': mean['pm25'].round(),
            'y_pm10': mean['pm10'].round()
        }
        # shared y limit rounded up to the next 25;
        # Series.append was removed in pandas 2.0 -> compare the two maxima
        highest = max(axis['y_pm25'].max(), axis['y_pm10'].max())
        self.y_max = np.ceil(highest / 25) * 25 + 25
        return axis

    def write_plt(self, thresh, title):
        """Render one bar chart colored by `thresh` and save it to disk."""
        file_name = title.replace('.', '')
        plt_title = f'Daily avg PM {title} exposure'
        x = self.axis['x']
        y = self.axis['y_pm' + file_name]
        # make ticks
        x_range = np.arange(10).tolist()
        x_date_time = pd.to_datetime(x).dt.date.unique()
        x_dates = [i.strftime('%d %b') for i in x_date_time]
        # green below the threshold, red at or above it
        col = ['#6ecd65' if val < thresh else '#ff4d4d' for val in y]
        # plot
        plt.style.use('seaborn')
        plt.bar(x_dates, y, color=col, width=0.5)
        plt.axhline(y=thresh, color='#6ecd65', linestyle=':')
        plt.xticks(ticks=x_range, labels=x_dates)
        plt.yticks(np.arange(0, self.y_max, step=25))
        plt.title(plt_title, fontsize=20)
        plt.tight_layout()
        plt.savefig(f'static/dyn/pm{file_name}.png', dpi=300)
        plt.close('all')
        plt.figure()
|
||||
|
||||
|
||||
class HourBar:
    """Build and write the hour-of-day average AQI bar chart."""

    def __init__(self, rows):
        """rows: (epoch_time, aqi_value, ...) tuples from recent days."""
        print('recreating hour avg bar chart')
        self.rows = rows
        self.axis = self.get_axis()
        self.write_plt()

    def get_axis(self):
        """Average the rows per hour of day and return plot axis data."""
        stamps = [datetime.fromtimestamp(row[0]) for row in self.rows]
        values = [int(row[1]) for row in self.rows]
        frame = pd.DataFrame({'timestamp': stamps, 'aqi': values})
        by_time = frame.set_index('timestamp')
        by_time.sort_values(by=['timestamp'], inplace=True)
        hourly = by_time.resample('1h').mean()
        # collapse all days onto a single 0-23 hour axis
        hour_avg = hourly.groupby([hourly.index.hour]).mean()
        hour_avg.reset_index(level=0, inplace=True)
        return {
            'x': hour_avg['timestamp'],
            'y': hour_avg['aqi'].round()
        }

    def write_plt(self):
        """Render the bar chart and save it to static/dyn/hours.png."""
        plt_title = 'Last three days average AQI for each hour'
        x = self.axis['x']
        y = self.axis['y']
        # tick labels every three hours, "HH:00" style
        x_range = np.arange(0, 24, step=3)
        x_hours = [str(hour).zfill(2) + ":00" for hour in x_range]
        y_max = np.ceil(max(y) / 50) * 50 + 50
        # per-bar AQI category colors
        col = NightlyPlots.color_colums(y)
        plt.style.use('seaborn')
        plt.bar(x, y, color=col, width=0.5)
        plt.yticks(np.arange(0, y_max, step=50))
        plt.xticks(ticks=x_range, labels=x_hours)
        plt.title(plt_title, fontsize=20)
        plt.tight_layout()
        plt.savefig('static/dyn/hours.png', dpi=300)
        plt.close('all')
        plt.figure()
|
||||
|
||||
|
||||
class YearComparison:
    """Export the year-on-year comparison bar chart and table json."""

    def __init__(self, rows, y_rows):
        """rows: current period; y_rows: same period one year earlier."""
        print('recreating year comparison')
        self.rows = rows
        self.y_rows = y_rows
        self.axis = self.get_axis()
        self.write_table()
        self.write_plt()

    def get_axis(self):
        """Build daily means for both years plus a per-day change label."""
        # first df with current data
        x_timeline = [datetime.fromtimestamp(i[0]) for i in self.rows]
        y_aqi_values = [int(i[1]) for i in self.rows]
        df = pd.DataFrame({'timestamp': x_timeline, 'now_aqi': y_aqi_values})
        indexed = df.set_index('timestamp')
        indexed.sort_values(by=['timestamp'], inplace=True)
        mean = indexed.resample('1d').mean().round()
        mean.reset_index(level=0, inplace=True)
        # second df with last year data
        x_timeline = [datetime.fromtimestamp(i[0]) for i in self.y_rows]
        y_aqi_values = [int(i[1]) for i in self.y_rows]
        df = pd.DataFrame({'timestamp': x_timeline, 'year_aqi': y_aqi_values})
        indexed = df.set_index('timestamp')
        indexed.sort_values(by=['timestamp'], inplace=True)
        year_mean = indexed.resample('1d').mean().round()
        year_mean.reset_index(level=0, inplace=True)
        # merge positionally: both frames hold one row per day
        mean['year_aqi'] = year_mean['year_aqi']
        mean.sort_values(by='timestamp', inplace=True)
        mean['timestamp'] = mean['timestamp'].dt.strftime('%d %b')
        # relative change; anything within +-15% counts as 'same'
        mean['diff'] = (mean['now_aqi'] - mean['year_aqi']) / mean['now_aqi']
        mean['change'] = np.where(
            mean['diff'].abs() < 0.15, 'same', mean['diff']
        )
        mean['change'] = np.where(
            mean['diff'] <= -0.15, 'down', mean['change']
        )
        mean['change'] = np.where(mean['diff'] >= 0.15, 'up', mean['change'])
        # return axis
        axis = {
            'x': mean['timestamp'],
            'y_1': mean['now_aqi'].astype('int'),
            'y_2': mean['year_aqi'].astype('int'),
            'change': mean['change']
        }
        return axis

    def write_table(self):
        """Write the comparison table json consumed by the frontend."""
        # build average row on top
        avg = int(self.axis['y_1'].mean())
        y_avg = int(self.axis['y_2'].mean())
        diff_avg = (avg - y_avg) / avg
        if diff_avg <= -0.15:
            avg_change = 'down'
        elif diff_avg >= 0.15:
            avg_change = 'up'
        else:
            avg_change = 'same'
        avg_row = ('avg 7 days', avg, y_avg, avg_change)
        # one tuple per day after the average row
        data_rows = list(zip(
            self.axis['x'], self.axis['y_1'],
            self.axis['y_2'], self.axis['change']
        ))
        data_rows.insert(0, avg_row)
        json_dict = json.dumps({"data": data_rows})
        # write to file
        with open('static/dyn/year-table.json', 'w') as f:
            f.write(json_dict)

    def write_plt(self):
        """Render the grouped year-comparison bar chart and save it."""
        x = self.axis['x']
        y_1 = self.axis['y_1']
        y_2 = self.axis['y_2']
        # per-bar AQI category colors
        col_y_1 = NightlyPlots.color_colums(y_1)
        col_y_2 = NightlyPlots.color_colums(y_2)
        # Series.append was removed in pandas 2.0 -> use pd.concat
        y_max = int(np.ceil(pd.concat([y_1, y_2]).max() / 50) * 50 + 50)
        x_indexes = np.arange(len(x))
        # build plot
        width = 0.25
        plt_title = 'Daily avg AQI values compared to last year'
        plt_suptitle = 'left: this year, right: last year'
        plt.style.use('seaborn')
        # this year left of the tick, last year right of it
        plt.bar(
            x_indexes - (width / 2) - 0.02, y_1, color=col_y_1, width=width
        )
        plt.bar(
            x_indexes + (width / 2) + 0.02, y_2, color=col_y_2, width=width
        )
        plt.title(plt_suptitle, fontsize=15)
        plt.suptitle(plt_title, fontsize=20, y=0.96)
        plt.yticks(np.arange(0, y_max, step=50))
        plt.xticks(ticks=x_indexes, labels=x)
        plt.tight_layout()
        plt.savefig('static/dyn/year-graph.png', dpi=300)
        plt.figure()
|
||||
|
||||
|
||||
def main():
    """Run every nightly chart and table export in order."""
    nightly = NightlyPlots()
    exports = (
        nightly.recreate_last_7,
        nightly.recreate_last_3,
        nightly.recreate_pm_chart,
        nightly.recreate_hour_bar,
        nightly.recreate_year_comparison,
    )
    for recreate in exports:
        recreate()
|
|
@ -0,0 +1,78 @@
|
|||
""" collection of helper function and classes """
|
||||
|
||||
import json
|
||||
|
||||
from flask_table import create_table, Col
|
||||
|
||||
|
||||
def get_config():
    """Read and parse the application config from config.json.

    Returns the parsed JSON content (presumably a dict of settings).
    Raises FileNotFoundError if the file is missing and
    json.JSONDecodeError if it is malformed.
    """
    config_path = 'config.json'
    # json.load parses straight from the file handle; explicit encoding
    # keeps behavior stable across platforms
    with open(config_path, 'r', encoding='utf-8') as config_file:
        config = json.load(config_file)
    return config
|
||||
|
||||
|
||||
def plt_fill(plt, x, y):
    """Fill the area under the curve with the AQI category colors.

    Each band is painted from its lower AQI bound up to the curve
    wherever the curve exceeds that bound, so higher categories
    overpaint lower ones; a translucent white overlay softens the
    result. `plt` is the pyplot module (or a compatible object).
    """
    # (lower bound, color) per AQI category, ascending
    bands = [
        (0, '#85a762'),     # good
        (50, '#d4b93c'),    # moderate
        (100, '#e96843'),   # ufsg
        (150, '#d03f3b'),   # unhealthy
        (200, '#be4173'),   # vunhealthy
        (300, '#714261'),   # hazardous
    ]
    for bound, color in bands:
        plt.fill_between(
            x, y, y2=bound, where=(y > bound), color=color, interpolate=True
        )
    plt.fill_between(
        x, y, y2=0, where=(y > 0), color='#ffffff', alpha=0.1, interpolate=True
    )  # soft
|
||||
|
||||
|
||||
class Table:
    """Build an html table object from a json file for the template."""

    # header labels, in column order, matched positionally to row tuples
    COLUMNS = [' ', 'this year', 'last year', 'change']

    def __init__(self, filename):
        self.filename = filename
        self.rows = self.get_rows()

    def get_rows(self):
        """Parse the json file into a list of column-keyed row dicts."""
        with open(self.filename, 'r') as json_file:
            table_json = json.loads(json_file.read())
        return [
            dict(zip(self.COLUMNS, entry)) for entry in table_json['data']
        ]

    def create_table(self):
        """Build the flask_table object from COLUMNS and the parsed rows."""
        blank_table = create_table(options={'classes': ['comp-table']})
        for column in self.COLUMNS:
            blank_table.add_column(column, Col(column))
        table_obj = blank_table(self.rows)
        return table_obj
|
|
@ -0,0 +1,3 @@
|
|||
#!/usr/bin/env bash
# Launch the app through uWSGI with the project ini config.
# exec replaces the shell so uWSGI receives signals (SIGTERM etc.)
# directly, which matters when this runs as a container entrypoint.
exec uwsgi --ini uwsgi.ini
|
|
@ -0,0 +1,365 @@
|
|||
* {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: Rubik-Bold;
|
||||
src: url(../font/Rubik-Bold.ttf);
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: Rubik-Light;
|
||||
src: url(../font/Rubik-Light.ttf);
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: Rubik-Regular;
|
||||
src: url(../font/Rubik-Regular.ttf);
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-family: Rubik-Bold;
|
||||
font-size: 3em;
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-family: Rubik-Bold;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-family: Rubik-Light;
|
||||
font-size: 1.3em;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
p, li, td, th {
|
||||
font-family: Rubik-Regular;
|
||||
font-size: 1.1em;
|
||||
}
|
||||
|
||||
a {
|
||||
color: inherit;
|
||||
font-family: Rubik-Regular;
|
||||
}
|
||||
|
||||
.block-text {
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
.full-width {
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
.content {
|
||||
width: 70%;
|
||||
max-width: 900px;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
padding: 20px 0;
|
||||
}
|
||||
|
||||
.content-subtitle {
|
||||
max-width: 900px;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
padding-top: 20px;
|
||||
}
|
||||
|
||||
.col-box {
|
||||
width: 70%;
|
||||
min-height: 20px;
|
||||
max-width: 900px;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.preload {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
width: 100%;
|
||||
height: 100vh;
|
||||
background: #e6e6e6;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
.preload img {
|
||||
width: 100px;
|
||||
}
|
||||
|
||||
.preload-finish {
|
||||
opacity: 0;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.good {
|
||||
background-color: #85a762;
|
||||
}
|
||||
.moderate {
|
||||
background-color: #d4b93c;
|
||||
}
|
||||
.ufsg {
|
||||
background-color: #e96843;
|
||||
}
|
||||
.unhealthy {
|
||||
background-color: #d03f3b;
|
||||
}
|
||||
.vunhealthy {
|
||||
background-color: #be4173;
|
||||
}
|
||||
.hazardous {
|
||||
background-color: #714261;
|
||||
}
|
||||
|
||||
/* title start */
|
||||
.title {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.title-nav ul {
|
||||
list-style-type:none;
|
||||
}
|
||||
|
||||
.title-nav li {
|
||||
padding: 5px 40px;
|
||||
margin: 5px;
|
||||
border-style: none none solid none;
|
||||
border-width: 2px;
|
||||
}
|
||||
|
||||
.title-nav a {
|
||||
text-decoration: none;
|
||||
}
|
||||
/* title end */
|
||||
|
||||
/* cloud start */
|
||||
.cloud {
|
||||
position: relative;
|
||||
padding-bottom: 30px;
|
||||
}
|
||||
|
||||
.cloud img {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.aqi_box {
|
||||
position: absolute;
|
||||
top: 45%;
|
||||
left: 57%;
|
||||
text-align: right;
|
||||
transform: translate(-50%, -50%);
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.aqi_box h1 {
|
||||
font-size: 15em;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.aqi_box h2 {
|
||||
font-family: Rubik-Light;
|
||||
font-size: 1.8em;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.aqi_box p {
|
||||
margin: 0;
|
||||
}
|
||||
/* cloud end */
|
||||
|
||||
/* weather start */
|
||||
.weather {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.weather_box {
|
||||
border: solid 2px;
|
||||
border-radius: 20px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: flex-start;
|
||||
padding: 10px 0;
|
||||
width: 24%;
|
||||
}
|
||||
|
||||
.weather_icon img {
|
||||
width: 50px;
|
||||
margin: 10px;
|
||||
}
|
||||
/* weather end */
|
||||
/* category start */
|
||||
.category {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.cat-icon img {
|
||||
width: 100%;
|
||||
max-width: 200px;
|
||||
}
|
||||
.cat-item {
|
||||
color: #fff;
|
||||
margin: 10px;
|
||||
padding: 15px;
|
||||
text-align: center;
|
||||
}
|
||||
.cat-item.active {
|
||||
transform: scale(1.2);
|
||||
font-size: 1.1em;
|
||||
margin: 15px;
|
||||
box-shadow: darkgray 5px 5px;
|
||||
font-weight: bold;
|
||||
}
|
||||
/* category end */
|
||||
|
||||
/* last graph start */
|
||||
.graph {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: initial;
|
||||
}
|
||||
|
||||
.graph-item {
|
||||
box-sizing: border-box;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.graph-item img {
|
||||
width: 100%;
|
||||
}
|
||||
/* last graph end */
|
||||
|
||||
/* table start */
|
||||
.comp-table {
|
||||
width: 100%;
|
||||
table-layout: fixed;
|
||||
margin: auto;
|
||||
}
|
||||
|
||||
.comp-table thead th {
|
||||
padding: 5px 0;
|
||||
background-color: #eeeeee;
|
||||
}
|
||||
|
||||
.comp-table tbody td:nth-child(1) {
|
||||
padding: 3px 0 3px 10px;
|
||||
background-color: #eeeeee;
|
||||
}
|
||||
|
||||
.comp-table td:nth-child(2),
|
||||
.comp-table td:nth-child(3),
|
||||
.comp-table td:nth-child(4) {
|
||||
text-align: center;
|
||||
color: #fff;
|
||||
}
|
||||
/* table end */
|
||||
|
||||
/* about start */
|
||||
.category-table th {
|
||||
text-align: left;
|
||||
padding: 0 10px;
|
||||
min-width: 150px;
|
||||
}
|
||||
|
||||
.category-table td {
|
||||
padding: 10px;
|
||||
min-width: 150px;
|
||||
}
|
||||
|
||||
.category-table td:nth-child(1) {
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.category-table tr {
|
||||
display: block;
|
||||
margin: 10px 0;
|
||||
}
|
||||
/* about end */
|
||||
|
||||
/* responsiv start */
|
||||
@media screen and (max-width: 1000px) {
|
||||
.content {
|
||||
width: 90%;
|
||||
}
|
||||
}
|
||||
|
||||
@media screen and (max-width: 600px) {
|
||||
h1 {
|
||||
font-size: 2.5em;
|
||||
}
|
||||
.content {
|
||||
width: 100%;
|
||||
}
|
||||
.title {
|
||||
flex-direction: column-reverse;
|
||||
width: 100%;
|
||||
padding-top: 0;
|
||||
}
|
||||
.title-nav li {
|
||||
float: left;
|
||||
padding: 10px;
|
||||
}
|
||||
#aqiValue {
|
||||
font-size: 6em;
|
||||
}
|
||||
.aqi_box {
|
||||
top: 38%;
|
||||
left: 50%;
|
||||
width: 60%;
|
||||
}
|
||||
.weather {
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
.weather_box {
|
||||
width: 47%;
|
||||
}
|
||||
.weather_box:nth-child(1),
|
||||
.weather_box:nth-child(2) {
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
.category {
|
||||
display: block;
|
||||
text-align: center;
|
||||
}
|
||||
.cat-item {
|
||||
width: 70%;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
.cat-item.active {
|
||||
margin: 20px auto;
|
||||
}
|
||||
.cat-icon img {
|
||||
max-width: unset;
|
||||
width: 70%;
|
||||
margin: 20px 0;
|
||||
}
|
||||
.graph {
|
||||
display: block;
|
||||
padding: 0;
|
||||
}
|
||||
.graph-item {
|
||||
margin: 20px 0;
|
||||
}
|
||||
.category-table td {
|
||||
min-width: 80px;
|
||||
}
|
||||
}
|
||||
/* responsiv end */
|
Before Width: | Height: | Size: 41 KiB After Width: | Height: | Size: 41 KiB |
Before Width: | Height: | Size: 280 KiB After Width: | Height: | Size: 280 KiB |
Before Width: | Height: | Size: 7.0 KiB After Width: | Height: | Size: 7.0 KiB |
Before Width: | Height: | Size: 6.6 KiB After Width: | Height: | Size: 6.6 KiB |
Before Width: | Height: | Size: 2.5 KiB After Width: | Height: | Size: 2.5 KiB |
Before Width: | Height: | Size: 5.3 KiB After Width: | Height: | Size: 5.3 KiB |
Before Width: | Height: | Size: 14 KiB After Width: | Height: | Size: 14 KiB |
Before Width: | Height: | Size: 4.9 KiB After Width: | Height: | Size: 4.9 KiB |
Before Width: | Height: | Size: 4.6 KiB After Width: | Height: | Size: 4.6 KiB |
Before Width: | Height: | Size: 4.6 KiB After Width: | Height: | Size: 4.6 KiB |
Before Width: | Height: | Size: 13 KiB After Width: | Height: | Size: 13 KiB |
Before Width: | Height: | Size: 7.9 KiB After Width: | Height: | Size: 7.9 KiB |
Before Width: | Height: | Size: 8.3 KiB After Width: | Height: | Size: 8.3 KiB |
Before Width: | Height: | Size: 8.3 KiB After Width: | Height: | Size: 8.3 KiB |
Before Width: | Height: | Size: 4.4 KiB After Width: | Height: | Size: 4.4 KiB |
Before Width: | Height: | Size: 4.4 KiB After Width: | Height: | Size: 4.4 KiB |
Before Width: | Height: | Size: 12 KiB After Width: | Height: | Size: 12 KiB |
Before Width: | Height: | Size: 12 KiB After Width: | Height: | Size: 12 KiB |
Before Width: | Height: | Size: 8.5 KiB After Width: | Height: | Size: 8.5 KiB |
Before Width: | Height: | Size: 8.5 KiB After Width: | Height: | Size: 8.5 KiB |
Before Width: | Height: | Size: 9.7 KiB After Width: | Height: | Size: 9.7 KiB |
Before Width: | Height: | Size: 9.7 KiB After Width: | Height: | Size: 9.7 KiB |
Before Width: | Height: | Size: 36 KiB After Width: | Height: | Size: 36 KiB |
Before Width: | Height: | Size: 43 KiB After Width: | Height: | Size: 43 KiB |
Before Width: | Height: | Size: 26 KiB After Width: | Height: | Size: 26 KiB |
Before Width: | Height: | Size: 14 KiB After Width: | Height: | Size: 14 KiB |
Before Width: | Height: | Size: 30 KiB After Width: | Height: | Size: 30 KiB |
Before Width: | Height: | Size: 38 KiB After Width: | Height: | Size: 38 KiB |
Before Width: | Height: | Size: 38 KiB After Width: | Height: | Size: 38 KiB |
Before Width: | Height: | Size: 8.0 KiB After Width: | Height: | Size: 8.0 KiB |
Before Width: | Height: | Size: 20 KiB After Width: | Height: | Size: 20 KiB |
Before Width: | Height: | Size: 16 KiB After Width: | Height: | Size: 16 KiB |
Before Width: | Height: | Size: 280 B After Width: | Height: | Size: 280 B |
Before Width: | Height: | Size: 8.3 KiB After Width: | Height: | Size: 8.3 KiB |
Before Width: | Height: | Size: 1.3 KiB After Width: | Height: | Size: 1.3 KiB |
Before Width: | Height: | Size: 1.3 KiB After Width: | Height: | Size: 1.3 KiB |
Before Width: | Height: | Size: 246 KiB After Width: | Height: | Size: 246 KiB |
|
@ -64,9 +64,9 @@ function rmPreload() {
|
|||
// scrollbar
|
||||
document.querySelector('body').style.overflow = 'unset'
|
||||
// sticky
|
||||
const topBar = document.querySelector('.colorbox');
|
||||
topBar.style.position = 'sticky';
|
||||
topBar.style.position = '-webkit-sticky';
|
||||
// const topBar = document.querySelector('.colorbox');
|
||||
// topBar.style.position = 'sticky';
|
||||
// topBar.style.position = '-webkit-sticky';
|
||||
}
|
||||
|
||||
// reload current.png from remote
|
||||
|
@ -88,7 +88,7 @@ function refreshAqiValues() {
|
|||
return new Promise((resolve, reject) => {
|
||||
var req = new XMLHttpRequest();
|
||||
req.responseType = 'json';
|
||||
req.open('GET', '/dyn/air.json', true);
|
||||
req.open('GET', '/data/out', true);
|
||||
req.setRequestHeader('cache-control', 'no-cache');
|
||||
req.onload = function() {
|
||||
var responseAqi = req.response;
|
||||
|
@ -149,7 +149,7 @@ function setAqiColors(aqiCategory) {
|
|||
};
|
||||
}
|
||||
// apply light background change
|
||||
var lightBg = document.getElementsByClassName('light_background');
|
||||
var lightBg = document.getElementsByClassName('col_lightbg');
|
||||
if (lightBg) {
|
||||
for (var i = 0; i < lightBg.length; i++) {
|
||||
lightBg[i].style.backgroundColor = colSecond;
|
||||
|
@ -170,7 +170,7 @@ function setAqiColors(aqiCategory) {
|
|||
};
|
||||
}
|
||||
// apply hover color
|
||||
var css = '.nav li:hover {background-color: ' + colMain + ';}';
|
||||
var css = '.title-nav li:hover {background-color: ' + colMain + ';}';
|
||||
var style = document.createElement('style');
|
||||
style.appendChild(document.createTextNode(css));
|
||||
document.getElementsByTagName('head')[0].appendChild(style);
|
||||
|
@ -185,7 +185,7 @@ function setWeatherDetails(responseAqi) {
|
|||
var humidity = Math.round(responseAqi['humidity']);
|
||||
var pressure = Math.round(responseAqi['pressure']);
|
||||
// weather icon
|
||||
weatherIconSrc = '/img/icon/' + weatherIcon + '.png';
|
||||
weatherIconSrc = '/static/img/icon/' + weatherIcon + '.png';
|
||||
document.getElementById('weather_icon').src = weatherIconSrc;
|
||||
// weather name
|
||||
document.getElementById('weather_name').innerHTML = weatherName;
|
||||
|
@ -203,7 +203,7 @@ function setDesc(responseAqi) {
|
|||
// parse response
|
||||
var aqiCategory = responseAqi['aqi_category'];
|
||||
var aqiCatClean = aqiCategory.toLowerCase().replaceAll(' ', '');
|
||||
var iconSrc = '/img/icon/category-' + aqiCatClean + ".png";
|
||||
var iconSrc = '/static/img/icon/category-' + aqiCatClean + ".png";
|
||||
// parse config
|
||||
var aqiRange = colorConfig[aqiCategory][3];
|
||||
var aqiDesc = colorConfig[aqiCategory][4];
|
||||
|
@ -220,7 +220,7 @@ function setDesc(responseAqi) {
|
|||
// figure out which to activate
|
||||
var allCategories = Object.keys(colorConfig);
|
||||
var indexMatch = allCategories.indexOf(aqiCategory);
|
||||
var activeCat = document.getElementsByClassName('desc_item')[indexMatch];
|
||||
var activeCat = document.getElementsByClassName('cat-item')[indexMatch];
|
||||
// activate
|
||||
activeCat.classList.add("active");
|
||||
}
|
|
@ -0,0 +1,55 @@
|
|||
{% extends "layout.html" %}
|
||||
{% block content %}
|
||||
<div class="full-width">
|
||||
<div class="content">
|
||||
<h1>About</h1>
|
||||
<p class="block-text">This page and its contents are still under construction. More content is coming soon.</p>
|
||||
<p class="block-text">The data for this page is collected from an air monitor located just outside of Luang Prabang, Laos. While we do our best, no guarantee is given for the accuracy of this data.</p>
|
||||
<p class="block-text">The data is updated every 3 minutes. Contrary to international websites who measure the air pollution via satellite images and rely on estimates and averages, an on-site air monitor delivers real time values that paint a much more accurate picture of the local situation.</p>
|
||||
<p class="block-text">Roughly, the Air Quality Index (AQI) is the internationally used air quality standard to measure the pollution of the air. It is divided into 6 levels, and according to these levels, certain health advices are given:</p>
|
||||
</div>
|
||||
<div class="content">
|
||||
<hr class="col_border">
|
||||
</div>
|
||||
<div class="content">
|
||||
<table class="category-table">
|
||||
<tr>
|
||||
<th>AQI Values</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="good">0 to 50:</td>
|
||||
<td>Good: No health concerns, enjoy activities.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="moderate">51 - 100:</td>
|
||||
<td>Moderate: Air quality is acceptable; however, unusually sensitive people should consider limiting prolonged outdoor exertion.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="ufsg">101 - 150:</td>
|
||||
<td>Unhealthy for Sensitive Groups: Active children and adults, and people with respiratory disease, such as asthma, should limit prolonged outdoor exertion.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="unhealthy">151 - 200:</td>
|
||||
<td>Unhealthy: Everyone may begin to experience health effects: Active children and adults, and people with respiratory disease, such as asthma, should avoid prolonged outdoor exertion; everyone else, especially children, should limit prolonged outdoor exertion.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="vunhealthy">201 - 300:</td>
|
||||
<td>Very Unhealthy: Active children and adults, and people with respiratory disease, such as asthma, should avoid all outdoor exertion; everyone else, especially children, should limit outdoor exertion.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="hazardous">301 - 500:</td>
|
||||
<td>Hazardous: Everyone should avoid all outdoor exertion.</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
<div class="content">
|
||||
<hr class="col_border">
|
||||
</div>
|
||||
<div class="content">
|
||||
<h2>Credits</h2>
|
||||
<p class="block-text">Partial Weather data, namely weather icon, weather description and windspeed are provided from the <a href="https://openweathermap.org/ " target="_blank">openweathermap.org</a> API distributed under the <a href="https://openweathermap.org/full-price" target="_blank">Creative Commons Attribution-ShareAlike 4.0 International License</a>.</p>
|
||||
<p class="block-text"><a target="_blank" href="https://github.com/lokesh/lightbox2">Lightbox</a> made by Lokesh Dhakar, released under the <a target="_blank" href="https://raw.githubusercontent.com/lokesh/lightbox2/master/LICENSE">MIT license</a>.</p>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock content %}
|
|
@ -0,0 +1,103 @@
|
|||
{% extends "layout.html" %}
|
||||
{% block content %}
|
||||
<div class="full-width">
|
||||
<div class="content">
|
||||
<h1>Graphs</h1>
|
||||
<p>All the graphs and table on this page will get recreated every night with the newest values.</p>
|
||||
</div>
|
||||
<div class="content-subtitle">
|
||||
<h2>Last three days</h2>
|
||||
</div>
|
||||
<div class="content graph">
|
||||
<div class="graph-item">
|
||||
<p>Three days ago</p>
|
||||
<a href="/static/dyn/day-3.png" data-lightbox="graph">
|
||||
<img src="/static/dyn/day-3.png" alt="Three days ago graph">
|
||||
</a>
|
||||
</div>
|
||||
<div class="graph-item">
|
||||
<p>Two days ago</p>
|
||||
<a href="/static/dyn/day-2.png" data-lightbox="graph">
|
||||
<img src="/static/dyn/day-2.png" alt="Two days ago graph">
|
||||
</a>
|
||||
</div>
|
||||
<div class="graph-item">
|
||||
<p>Yesterday</p>
|
||||
<a href="/static/dyn/day-1.png" data-lightbox="graph">
|
||||
<img src="/static/dyn/day-1.png" alt="Yesterday graph">
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
<div class="content">
|
||||
<hr class="col_border">
|
||||
</div>
|
||||
<div class="content">
|
||||
<h2>Particle Matter sizes</h2>
|
||||
<p class="block-text"><b>There is no healthy level of pollution.</b> Particle matter (PM) are defined in two different sizes: PM 2.5 which represents particle sizes smaller than 2.5 µm or less than 1/20th of the diameter of a human hair and PM 10 which represents particle sizer smaller than 10 µm or 1/5th of the diameter of a human hair.</p>
|
||||
<p class="block-text">The <a href="https://www.who.int/news-room/fact-sheets/detail/ambient-(outdoor)-air-quality-and-health" target="_blank">WHO</a> is providing more details on their website regarding particle matter and their health implications. On <a href="https://en.wikipedia.org/wiki/Particulates" target="blank">Wikipedia</a> there are some interesting links to studies for further reading.</p>
|
||||
</div>
|
||||
<div class="content graph">
|
||||
<div class="graph-item">
|
||||
<a href="/static/dyn/pm25.png" data-lightbox="pm">
|
||||
<img src="/static/dyn/pm25.png" alt="pm 2.5 value graph">
|
||||
</a>
|
||||
</div>
|
||||
<div class="graph-item">
|
||||
<h3>PM 2.5</h3>
|
||||
<p class="block-text">Particle matter sizes smaller than 2.5µm are the most problematic as these particles will find their way through the lungs into the bloodstream.</p>
|
||||
<p class="block-text">The WHO Air quality guideline values set a 25 µg/m³ 24-hour average as an upper level threshold. In the 10 days overview you can see:</p>
|
||||
<p class="block-text">Green: Daily average exposure below 25 µg/m³<br>
|
||||
Red: Daily average exposure above 25 µg/m³</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="content graph">
|
||||
<div class="graph-item">
|
||||
<a href="/static/dyn/pm10.png" data-lightbox="pm">
|
||||
<img src="/static/dyn/pm10.png" alt="pm 10 value graph">
|
||||
</a>
|
||||
</div>
|
||||
<div class="graph-item">
|
||||
<h3>PM 10</h3>
|
||||
<p class="block-text">The threshold for the daily average PM 10 exposure is set to 50 µg/m³ by the WHO. Particles this size can penetrate and lodge deep inside the lungs but are too big to enter the blood stream. For this reason the threshold is higher.</p>
|
||||
<p class="block-text">In the 10 days overview you can see:</p>
|
||||
<p class="block-text">Green: Daily average exposure below 50 µg/m³<br>
|
||||
Red: Daily average exposure above 50 µg/m³</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="content">
|
||||
<hr class="col_border">
|
||||
</div>
|
||||
<div class="content-subtitle">
|
||||
<h2>Hour by Hour</h2>
|
||||
</div>
|
||||
<div class="content graph">
|
||||
<div class="graph-item">
|
||||
<a href="/static/dyn/hours.png" data-lightbox="hours">
|
||||
<img src="/static/dyn/hours.png" alt="hour by hour graph">
|
||||
</a>
|
||||
</div>
|
||||
<div class="graph-item">
|
||||
<h3>Hourly AQI average</h3>
|
||||
<p class="block-text">The AQI value can change a lot during the day. This can depend on the wind, cooking on fire or just another additional source of pollution nearby.</p>
|
||||
<p class="block-text">In this chart you can see the average AQI for each hour. This data is based on the last three days. This data can help to visualize which hours in the past three days have been better or worse on average.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="content">
|
||||
<hr class="col_border">
|
||||
</div>
|
||||
<div class="content">
|
||||
<h2>Compared to last year</h2>
|
||||
<p>This year's daily average AQI values from the last 10 days compared to the corresponding values from last year.</p>
|
||||
</div>
|
||||
<div class="content graph">
|
||||
<div class="graph-item">
|
||||
{{ table }}
|
||||
</div>
|
||||
<div class="graph-item">
|
||||
<a href="/static/dyn/year-graph.png" data-lightbox="year">
|
||||
<img src="/static/dyn/year-graph.png" alt="year comparison graph">
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock content %}
|
|
@ -0,0 +1,103 @@
|
|||
{% extends "layout.html" %}
|
||||
{% block content %}
|
||||
<div class="full-width col_lightbg">
|
||||
<div class="content">
|
||||
<div class="cloud">
|
||||
<img src="{{ url_for('static', filename='img/cloud.png') }}" alt="cloud" class="col_filter">
|
||||
<div class="aqi_box">
|
||||
<h1 id="aqiValue"></h1>
|
||||
<p id="aqi-label">US AQI</p>
|
||||
<h2 id="aqiCategory"></h2>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="full-width">
|
||||
<div class="content weather">
|
||||
<div class="weather_box col_border">
|
||||
<div class="weather_icon">
|
||||
<img src="{{ url_for('static', filename='img/icon/000.png') }}" alt="weather_icon" class="col_filter" id="weather_icon">
|
||||
</div>
|
||||
<div class="weather_text">
|
||||
<h3 class="col_font"><span id="temperature"></span><span> °C</span></h3>
|
||||
<p id="weather_name"></p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="weather_box col_border">
|
||||
<div class="weather_icon">
|
||||
<img src="{{ url_for('static', filename='img/icon/wind.png') }}" alt="wind_icon" class="col_filter">
|
||||
</div>
|
||||
<div class="weather_text">
|
||||
<h3 class="col_font">Wind</h3>
|
||||
<p><span id="wind_speed"></span><span> km/h</span></p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="weather_box col_border">
|
||||
<div class="weather_icon">
|
||||
<img src="{{ url_for('static', filename='img/icon/humidity.png') }}" alt="humidity_icon" class="col_filter">
|
||||
</div>
|
||||
<div class="weather_text">
|
||||
<h3 class="col_font">Humidity</h3>
|
||||
<p><span id="humidity"></span><span> %</span></p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="weather_box col_border">
|
||||
<div class="weather_icon">
|
||||
<img src="{{ url_for('static', filename='img/icon/pressure.png') }}" alt="pressure_icon" class="col_filter">
|
||||
</div>
|
||||
<div class="weather_text">
|
||||
<h3 class="col_font">Pressure</h3>
|
||||
<p><span id="pressure"></span><span> mbar</span></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="full-width col_lightbg">
|
||||
<div class="content category">
|
||||
<div class="cat-bar">
|
||||
<div class="cat-item good">
|
||||
<p>Good</p>
|
||||
</div>
|
||||
<div class="cat-item moderate">
|
||||
<p>Moderate</p>
|
||||
</div>
|
||||
<div class="cat-item ufsg">
|
||||
<p>Unhealthy for Sensitive Groups</p>
|
||||
</div>
|
||||
<div class="cat-item unhealthy">
|
||||
<p>Unhealthy</p>
|
||||
</div>
|
||||
<div class="cat-item vunhealthy">
|
||||
<p>Very Unhealthy</p>
|
||||
</div>
|
||||
<div class="cat-item hazardous">
|
||||
<p>Hazardous</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="cat-icon">
|
||||
<img src="static/img/icon/category-plain.png" alt="category_icon" id="categoryIcon">
|
||||
</div>
|
||||
<div class="cat-desc">
|
||||
<h2 class="col_font" id="aqiName"></h2>
|
||||
<h3 id="aqiRange"></h3>
|
||||
<p id="aqiDesc"></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="full-width">
|
||||
<div class="content graph">
|
||||
<div class="graph-item">
|
||||
<h3>Last three hours</h3>
|
||||
<a href="/static/dyn/current.png" data-lightbox="graph">
|
||||
<img src="/static/dyn/current.png" alt="current graph">
|
||||
</a>
|
||||
</div>
|
||||
<div class="graph-item">
|
||||
<h3>Last 7 days</h3>
|
||||
<a href="/static/dyn/last-7.png" data-lightbox="graph">
|
||||
<img src="/static/dyn/last-7.png" alt="last-7 graph">
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock content %}
|
|
@ -0,0 +1,53 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
{% if title %}
|
||||
<title>AQI - {{ title }}</title>
|
||||
{% else %}
|
||||
<title>AQI</title>
|
||||
{% endif %}
|
||||
<link rel="shortcut icon" href="{{ url_for('static', filename='favicon.ico') }}">
|
||||
<script src="{{ url_for('static', filename='js/lightbox.js') }}"></script>
|
||||
<script src="{{ url_for('static', filename='js/aqi.js') }}"></script>
|
||||
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename='css/style.css') }}">
|
||||
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename='css/lightbox.css') }}">
|
||||
</head>
|
||||
<body>
|
||||
<div class="preload">
|
||||
<img src="{{ url_for('static', filename='img/cloud_colors.gif') }}" alt="cloud_animation">
|
||||
</div>
|
||||
<div class="full-width col_bg">
|
||||
<div class="col-box">
|
||||
<!-- <ul>
|
||||
<p>Latest updates:</p>
|
||||
<li><span class="content-date">03.04.2021:</span> Added March summary on new <a href="/monthly">monthly</a> page</li>
|
||||
</ul> -->
|
||||
</div>
|
||||
</div>
|
||||
<div class="full-width col_lightbg">
|
||||
<div class="content title">
|
||||
<div class="title-text">
|
||||
<h1>Live Air Quality</h1>
|
||||
<h2>in Luang Prabang Laos PDR</h2>
|
||||
<p>Last updated: <span id="timestamp"></span></p>
|
||||
</div>
|
||||
<div class="title-nav">
|
||||
<ul>
|
||||
<a href="/"><li class="col_border">Home</li></a>
|
||||
<a href="/about"><li class="col_border">About</li></a>
|
||||
<a href="/graphs"><li class="col_border">Graphs</li></a>
|
||||
<a href="/monthly"><li class="col_border">Monthly</li></a>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% block content %}{% endblock %}
|
||||
<div class="full-width col_bg">
|
||||
<div class="col-box">
|
||||
<p>© 2021 | <a href="https://github.com/bbilly1/aqi_monitor" target="_blank">Documentation</a></p>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,22 @@
|
|||
{% extends "layout.html" %}
|
||||
{% block content %}
|
||||
<div class="full-width">
|
||||
<div class="content">
|
||||
<h1>Month by month</h1>
|
||||
<p>Month compared to last year. Values are in 8h average.</p>
|
||||
</div>
|
||||
{% for month in months %}
|
||||
<div class="content-subtitle">
|
||||
<h2>{{month.month_name}}</h2>
|
||||
</div>
|
||||
<div class="content graph">
|
||||
<div class="graph-item">
|
||||
<img src="{{month.month_graph}}" alt="{{month.month_name}} graph">
|
||||
</div>
|
||||
<div class="graph-item">
|
||||
{{month.table}}
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endblock content %}
|
|
@ -1,5 +1,5 @@
|
|||
[uwsgi]
|
||||
wsgi-file = run.py
|
||||
wsgi-file = views.py
|
||||
callable = app
|
||||
socket = :8080
|
||||
processes = 4
|
||||
|
@ -7,4 +7,4 @@ threads = 2
|
|||
master = true
|
||||
chmod-socket = 660
|
||||
vacuum = true
|
||||
die-on-term = true
|
||||
die-on-term = true
|
|
@ -0,0 +1,114 @@
|
|||
""" main entry page to handle all the routes """
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
from flask import Flask, render_template, request, app
|
||||
from flask import url_for # pylint: disable=unused-import
|
||||
from flask_httpauth import HTTPBasicAuth
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
|
||||
|
||||
from src.helper import Table, get_config
|
||||
from src.db import get_current, insert_data
|
||||
from src.graph_current import main as current_graph
|
||||
from src.graph_nightly import main as nightly_graph
|
||||
from src.graph_monthly import main as monthly_graph
|
||||
|
||||
import matplotlib
|
||||
matplotlib.use('Agg')
|
||||
|
||||
# start up
app = Flask(__name__)

CONFIG = get_config()
auth = HTTPBasicAuth()
aqi_user = CONFIG['aqi_monitor']
# basic-auth credentials for the /data/in ingest endpoint
USER_DATA = {
    aqi_user['authUsername']: aqi_user['authPassword']
}

# initial export so the dyn/ graphs exist before the first cron tick
print('initial export')
current_graph()
nightly_graph()
monthly_graph()

# start scheduler
scheduler = BackgroundScheduler()
scheduler.add_job(
    current_graph, trigger="cron", minute='*/5', name='current_graph'
)
scheduler.add_job(
    nightly_graph, trigger="cron", day='*', hour='1', minute='1', name='night'
)
scheduler.add_job(
    # BUGFIX: the 'month' job previously ran nightly_graph, so the monthly
    # exports were never rebuilt
    monthly_graph, trigger="cron", day='*', hour='1', minute='2', name='month'
)
scheduler.start()
|
||||
|
||||
|
||||
@auth.verify_password
def verify(username, password):
    """ check the supplied basic-auth credentials against USER_DATA

    Returns True only when the username is known and the password matches.
    """
    import hmac  # stdlib, local so the route module order stays untouched
    if not (username and password):
        return False
    stored = USER_DATA.get(username)
    if stored is None:
        # unknown user
        return False
    # constant-time comparison: avoids leaking match length/prefix via timing
    return hmac.compare_digest(stored, password)
|
||||
|
||||
|
||||
@app.route("/")
|
||||
def home():
|
||||
""" home page """
|
||||
return render_template('home.html')
|
||||
|
||||
|
||||
@app.route("/about")
|
||||
def about():
|
||||
""" about page """
|
||||
return render_template('about.html', title='About')
|
||||
|
||||
|
||||
@app.route("/graphs")
|
||||
def graphs():
|
||||
""" graphs page """
|
||||
table = Table('static/dyn/year-table.json').create_table()
|
||||
return render_template('graphs.html', title='Graphs', table=table)
|
||||
|
||||
|
||||
@app.route("/monthly")
|
||||
def monthly():
|
||||
""" monthly statistics page """
|
||||
months = [i for i in os.listdir('static/dyn/monthly') if '.json' in i]
|
||||
months.sort(reverse=True)
|
||||
|
||||
month_dicts = []
|
||||
for month in months:
|
||||
month_clean = os.path.splitext(month)[0]
|
||||
month_graph = os.path.join('static/dyn/monthly', month_clean + '.png')
|
||||
month_name = datetime.strptime(month_clean, "%Y-%m").strftime('%B %Y')
|
||||
month_json = os.path.join('static/dyn/monthly', month)
|
||||
table = Table(month_json).create_table()
|
||||
month_dict = {
|
||||
'month_graph': month_graph,
|
||||
'month_name': month_name,
|
||||
'table': table
|
||||
}
|
||||
month_dicts.append(month_dict)
|
||||
|
||||
return render_template('monthly.html', title='Monthly', months=month_dicts)
|
||||
|
||||
|
||||
@app.route("/data/in", methods=['POST'])
|
||||
@auth.login_required
|
||||
def ingest():
|
||||
""" handle post request from monitor """
|
||||
post_data = request.json
|
||||
insert_data(post_data)
|
||||
return 'ingest'
|
||||
|
||||
|
||||
@app.route("/data/out")
|
||||
def data():
|
||||
""" return data from db """
|
||||
json_data = get_current()
|
||||
return json_data
|