Backend: create bar chart of hourly average AQI values
This commit is contained in:
parent
0fe1e73d17
commit
e3286af0cd
|
@ -80,9 +80,9 @@ def build_pm_plot(x, y, y_max, thresh, title):
|
|||
plt.figure()
|
||||
|
||||
|
||||
|
||||
def rebuild_pm_bar(config):
|
||||
""" rebuild pm2.5 and pm10 values """
|
||||
""" main function to rebuild pm2.5 and pm10 values """
|
||||
# get data
|
||||
rows = get_pm_data(config)
|
||||
x, y_1, y_2 = get_pm_axis(rows)
|
||||
# max
|
||||
|
@ -93,3 +93,89 @@ def rebuild_pm_bar(config):
|
|||
build_pm_plot(x, y_2, y_max, thresh=50, title='10')
|
||||
# done
|
||||
print('recreated PM 2.5 and PM 10 graphs')
|
||||
|
||||
|
||||
# hour bar chart
|
||||
def get_hour_data(config):
    """Fetch the last three days' worth of AQI rows from postgres.

    Args:
        config: application config dict, passed through to db_connect.

    Returns:
        List of (epoch_time, aqi_value) tuples, newest first.
    """
    # window: from local midnight today back three full days
    now = datetime.now()
    # local-midnight epoch computed portably; strftime('%s') is an
    # undocumented, platform-dependent extension (absent on Windows)
    day_until = int(datetime(now.year, now.month, now.day).timestamp())
    day_from = day_until - 3 * 24 * 60 * 60
    # call db with bound parameters instead of f-string interpolation
    conn, cur = db_connect(config)
    cur.execute(
        'SELECT epoch_time, aqi_value FROM aqi '
        'WHERE epoch_time > %s '
        'AND epoch_time < %s '
        'ORDER BY epoch_time DESC;',
        (day_from, day_until),
    )
    rows = cur.fetchall()
    db_close(conn, cur)
    return rows
|
||||
|
||||
|
||||
def get_hour_axis(rows):
    """Collapse raw (epoch, aqi) rows into per-hour-of-day averages.

    Returns:
        Tuple (x, y) where x holds the hour-of-day labels and y the
        rounded mean AQI for each hour.
    """
    frame = pd.DataFrame(
        {
            'timestamp': [datetime.fromtimestamp(row[0]) for row in rows],
            'aqi': [int(row[1]) for row in rows],
        }
    )
    frame = frame.set_index('timestamp')
    frame.sort_values(by=['timestamp'], inplace=True)
    # hourly means first, then fold all days onto a 24-hour clock
    hourly = frame.resample('1h').mean()
    by_clock_hour = hourly.groupby([hourly.index.hour]).mean()
    by_clock_hour.reset_index(level=0, inplace=True)
    return by_clock_hour['timestamp'], by_clock_hour['aqi'].round()
|
||||
|
||||
|
||||
def build_hour_plot(x, y):
    """Render the hourly-average AQI bar chart and write it to dyn/hours.png.

    Args:
        x: hour-of-day values (0-23) for the bars.
        y: mean AQI value per hour; also drives the bar colours.
    """
    plt_title = 'Last three days average AQI for each hour'
    # ticks every three hours, labelled HH:00
    x_range = np.arange(0, 24, step=3)
    x_hours = [str(i).zfill(2) + ":00" for i in x_range]
    # y limit: next multiple of 50 above the max, plus 50 of headroom
    y_max = np.ceil(max(y) / 50) * 50 + 50
    # AQI colour bands as (upper bound, colour) — replaces the redundant
    # if/elif chain (each lower bound was already excluded by the
    # preceding branch); inf catches the hazardous band
    aqi_bands = [
        (50, '#85a762'),            # good
        (100, '#d4b93c'),           # moderate
        (150, '#e96843'),           # unhealthy for sensitive groups
        (200, '#d03f3b'),           # unhealthy
        (300, '#be4173'),           # very unhealthy
        (float('inf'), '#714261'),  # hazardous
    ]
    col = [
        next(color for bound, color in aqi_bands if val <= bound)
        for val in y
    ]
    # create plot
    # NOTE(review): the 'seaborn' style name was removed in
    # matplotlib >= 3.6 ('seaborn-v0_8' is the replacement) — confirm
    # the pinned matplotlib version before upgrading
    plt.style.use('seaborn')
    plt.bar(x, y, color=col, width=0.5)
    plt.yticks(np.arange(0, y_max, step=50))
    plt.xticks(ticks=x_range, labels=x_hours)
    plt.title(plt_title, fontsize=20)
    plt.tight_layout()
    plt.savefig('dyn/hours.png', dpi=300)
    plt.close('all')
    plt.figure()
|
||||
|
||||
|
||||
def rebuild_hour_bar(config):
    """Entry point: regenerate the hourly average AQI bar chart."""
    raw_rows = get_hour_data(config)
    hours, averages = get_hour_axis(raw_rows)
    build_hour_plot(hours, averages)
|
||||
|
|
|
@ -42,6 +42,8 @@ graph.create_current(config)
|
|||
graph_pm.rebuild_pm_bar(config)
|
||||
graph.rebuild_3days(config)
|
||||
graph.rebuild_7days(config)
|
||||
graph_pm.rebuild_hour_bar(config)
|
||||
|
||||
# build username / pw dict for basic auth
|
||||
USER_DATA = {}
|
||||
USER_DATA[config['authUsername']] = config['authPassword']
|
||||
|
@ -64,6 +66,9 @@ scheduler.add_job(
|
|||
scheduler.add_job(
|
||||
graph_pm.rebuild_pm_bar, args=[config], trigger="cron", day='*', hour='1', minute='3', name='pm_bar'
|
||||
)
|
||||
scheduler.add_job(
|
||||
graph_pm.rebuild_hour_bar, args=[config], trigger="cron", day='*', hour='1', minute='4', name='hour_bar'
|
||||
)
|
||||
scheduler.start()
|
||||
|
||||
|
||||
|
|
Loading…
Reference in New Issue