#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import datetime
import json
import time
from pathlib import Path
from typing import Dict
import requests
# Seconds between rotations ("flips") of the displayed activity.
SECONDS_FLIP = 8
# Minutes between refreshes of the convention data from the server.
MINUTES_UPDATE = 5
# Language key used to select localized strings from the API payload.
LANG = 'pt'
# Host serving the convention API.
SERVER = 'events.furmeet.app'
# Convention slug used in the API URL.
CONVENTION = 'megaplex'
# Local cache of the downloaded convention JSON.
PATH_CACHE = Path('cached.json')
# Saved Last-Modified timestamp used for conditional GETs.
PATH_TS = Path('cached.txt')
# Folder receiving one title/body file pair per announcement.
FLDR_NEWS = Path('news')
# Output files for the activity happening right now:
# index, time range, title, subtitle, place, description.
PATH_NOW_ID = Path('now_i.txt')
PATH_NOW_TM = Path('now_0.txt')
PATH_NOW_TT = Path('now_1.txt')
PATH_NOW_ST = Path('now_2.txt')
PATH_NOW_PL = Path('now_3.txt')
PATH_NOW_DC = Path('now_4.txt')
# Output files for upcoming activities (same field order as above).
PATH_COMING_ID = Path('coming_i.txt')
PATH_COMING_TM = Path('coming_0.txt')
PATH_COMING_TT = Path('coming_1.txt')
PATH_COMING_ST = Path('coming_2.txt')
PATH_COMING_PL = Path('coming_3.txt')
PATH_COMING_DC = Path('coming_4.txt')
def try_updating_crashing() -> None:
    """Fetch the convention data from the server and refresh the local cache.

    Sends a conditional GET using the saved ``Last-Modified`` value so the
    server can answer 304 when nothing changed.  On a 200 response the JSON
    payload is written to ``PATH_CACHE`` and the new timestamp to ``PATH_TS``.

    Raises:
        requests.RequestException: on network failures / HTTP error statuses
            (via ``raise_for_status``).
        FileNotFoundError: on any other unexpected status code.
    """
    hdrs: Dict[str, str] = dict()
    if PATH_TS.is_file():
        # Conditional request: only ask for data newer than what we have.
        hdrs['If-Modified-Since'] = PATH_TS.read_text('utf-8').strip()
    resp = requests.get(
        f'https://{SERVER}/api/conventions/{CONVENTION}/minified.json',
        headers=hdrs,
        timeout=(15, 15),  # (connect, read) timeouts in seconds
    )
    resp.raise_for_status()
    if resp.status_code == 200:  # OK: fresh data arrived
        convention_data = resp.json()
        PATH_CACHE.write_text(
            json.dumps(convention_data),
            'utf-8')
        # BUG FIX: indexing resp.headers['Last-Modified'] raised KeyError when
        # the server omitted the header (the old `or ''` could not guard
        # that); .get() with a default handles the missing-header case.
        PATH_TS.write_text(
            resp.headers.get('Last-Modified', '').strip(),
            'utf-8')
    elif resp.status_code == 304:  # NOT MODIFIED: cache is already current
        pass
    else:
        # Any other non-error status gives us no usable data.
        raise FileNotFoundError()
def try_updating() -> None:
    """Best-effort wrapper around ``try_updating_crashing``.

    Swallows every failure (network, HTTP, parsing, filesystem) so the
    display loop keeps running on previously cached data.
    """
    try:
        try_updating_crashing()
    except Exception:
        pass
def try_writing_file(pth: Path, cnt: str) -> bool:
    """Write *cnt* to *pth* as UTF-8.

    Returns True on success, False on any error (missing directory,
    permissions, etc.) — callers treat writes as best-effort.
    """
    try:
        pth.write_text(cnt, 'utf-8')
    except Exception:
        return False
    return True
def parsetz(o) -> datetime.timezone:
    """Build a fixed-offset timezone from a mapping with 'hr', 'mn', 'name' keys."""
    offset = datetime.timedelta(hours=o['hr'], minutes=o['mn'])
    return datetime.timezone(offset, name=o['name'])
def parsetm(o, tz: datetime.timezone) -> datetime.datetime:
    """Build a timezone-aware datetime from a mapping with yr/mo/dy/hr/mn/sc keys."""
    return datetime.datetime(
        o['yr'], o['mo'], o['dy'],
        o['hr'], o['mn'], o['sc'],
        tzinfo=tz,
    )
def _write_activity_slot(paths, activities, cycles: int) -> None:
    """Write one display slot ("now" or "coming") to its six output files.

    *paths* is the 6-tuple of output paths in field order
    (index, time range, title, subtitle, place, description).
    When *activities* is empty, all six files are blanked; otherwise
    *cycles* selects which activity is displayed on this flip.
    """
    p_id, p_tm, p_tt, p_st, p_pl, p_dc = paths
    if not activities:
        for p in (p_id, p_tm, p_tt, p_st, p_pl, p_dc):
            try_writing_file(p, '')
        return
    n = cycles % len(activities)
    (start, end), (tt, st, dc), pl = activities[n]
    se = start.strftime("%H:%M") + ' - ' + end.strftime("%H:%M")
    try_writing_file(p_id, str(n + 1))
    try_writing_file(p_tm, se)
    try_writing_file(p_tt, tt)
    try_writing_file(p_st, st)
    # BUG FIX: the original wrote the description into the *_3 (place) file
    # and the place into the *_4 (description) file; swapped back so each
    # file carries the field its name promises.
    try_writing_file(p_pl, pl)
    try_writing_file(p_dc, dc)


def flip(cached: dict, cycles: int):
    """Refresh all output text files from the cached convention data.

    Writes one title/body file pair per announcement under ``FLDR_NEWS``,
    then computes the activities running right now and those overlapping
    the next "day part" segment, rotating through each list one entry per
    call via *cycles*.
    """
    FLDR_NEWS.mkdir(parents=True, exist_ok=True)
    for i, announcement in enumerate(cached['announcements']):
        try_writing_file(FLDR_NEWS.joinpath('%03d_title.txt' % i),
                         announcement['title'][LANG])
        try_writing_file(FLDR_NEWS.joinpath('%03d_body.txt' % i),
                         announcement['body'][LANG])
    # The featured edition drives everything below; bail out if not found.
    current_edition = next(
        (e for e in cached['editions'] if e['uuid'] == cached['featured']),
        None,
    )
    if current_edition is None:
        return
    places_uuid2nm: Dict[str, str] = {x['uuid']: x['label'][LANG]
                                      for x in current_edition['places']}
    tz = parsetz(cached['timezone'])
    # Each activity becomes ((start, end), (title, subtitle, description), place).
    activities = []
    for activity in current_edition['events']:
        start = parsetm(activity['time_start'], tz)
        end = parsetm(activity['time_end'], tz)
        tt = (activity['title'] or dict()).get(LANG, '')
        st = (activity['subtitle'] or dict()).get(LANG, '')
        dc = (activity['description'] or dict()).get(LANG, '')
        pl = ', '.join(places_uuid2nm[p] for p in activity['places'])
        activities.append(((start, end), (tt, st, dc), pl))
    dtnow = datetime.datetime.now(tz=tz)
    # A "day part" is one of N equal slices of the 24h day; "coming" lists
    # activities overlapping the first slice boundary after the current time.
    n_day_parts = current_edition['split_day_into_n_parts']
    day_part_delta = datetime.timedelta(seconds=(24 * 3600) / n_day_parts)
    dtnextsegst = datetime.datetime(
        year=dtnow.year, month=dtnow.month, day=dtnow.day, tzinfo=tz)
    while dtnextsegst <= dtnow:
        dtnextsegst += day_part_delta
    dtnextseged = dtnextsegst + day_part_delta
    activities_now = []
    activities_coming = []
    for act in activities:
        (start, end), _texts, _place = act
        if start <= dtnow < end:
            activities_now.append(act)
        # Overlap test between [start, end) and the next segment window:
        # either a window edge falls inside the activity, or an activity
        # edge falls inside the window.
        if (
            (dtnextsegst >= start and dtnextsegst < end) or
            (dtnextseged > start and dtnextseged < end) or
            (start >= dtnextsegst and start < dtnextseged) or
            (end > dtnextsegst and end < dtnextseged)
        ):
            activities_coming.append(act)
    _write_activity_slot(
        (PATH_NOW_ID, PATH_NOW_TM, PATH_NOW_TT,
         PATH_NOW_ST, PATH_NOW_PL, PATH_NOW_DC),
        activities_now, cycles)
    _write_activity_slot(
        (PATH_COMING_ID, PATH_COMING_TM, PATH_COMING_TT,
         PATH_COMING_ST, PATH_COMING_PL, PATH_COMING_DC),
        activities_coming, cycles)
    print(f'Cycle {cycles}')
def main():
    """Run the display loop forever.

    Refreshes the convention data from the server every ``MINUTES_UPDATE``
    minutes and rotates ("flips") the displayed activity every
    ``SECONDS_FLIP`` seconds, sleeping until the next due event.
    """
    try_updating()
    initial_time = last_updated = last_flipped = time.time()
    # Back-date the flip timestamp so the first flip happens immediately.
    last_flipped -= SECONDS_FLIP * 1
    while True:
        now = time.time()
        if (MINUTES_UPDATE * 60) - (now - last_updated) < 0:
            try_updating()
            last_updated = now
        # BUG FIX: the cache was previously loaded at the top of the loop,
        # before try_updating() could refresh it, so the first flip after
        # every update displayed stale data; read it after the update step.
        cached = json.loads(PATH_CACHE.read_text('utf-8'))
        if (SECONDS_FLIP * 1) - (now - last_flipped) < 0:
            # The cycle counter derives from wall time so rotation position
            # survives slow iterations.
            flip(cached, int((now - initial_time) // (SECONDS_FLIP * 1)))
            last_flipped = now
        # Sleep until whichever of the two deadlines comes first.
        wait_time = max(
            0,
            min(
                (MINUTES_UPDATE * 60) - (now - last_updated),
                (SECONDS_FLIP * 1) - (now - last_flipped)
            )
        )
        time.sleep(wait_time)


if __name__ == '__main__':
    main()