+#!/usr/bin/env python3
+
from bs4 import BeautifulSoup
import datetime
+import http.client
+import random
+import re
+from typing import Dict, List
+
import file_writer
import grab_bag
-import http.client
import page_builder
import profanity_filter
-import random
-import re
import renderer
import renderer_catalog
class stranger_events_renderer(renderer.debuggable_abstaining_renderer):
- def __init__(self, name_to_timeout_dict):
+ def __init__(self, name_to_timeout_dict: Dict[str, int]):
super(stranger_events_renderer, self).__init__(name_to_timeout_dict, True)
self.feed_site = "everout.com"
self.events = grab_bag.grab_bag()
- def debug_prefix(self):
+ def debug_prefix(self) -> str:
return "stranger"
- def periodic_render(self, key):
+ def periodic_render(self, key: str) -> bool:
        # Dispatch one named periodic action. Only the "Fetch Events" arm is
        # visible in this hunk; the rest of the method body (presumably a
        # "Shuffle Events" arm given shuffle_events below) was elided by the
        # diff -- TODO confirm against the full file.
        self.debug_print("called for action %s" % key)
        if key == "Fetch Events":
            return self.fetch_events()
# NOTE(review): the two lines below are residue from an elided diff hunk --
# they look like the tail of a triple-quoted CSS/style string belonging to a
# method that is not shown here. They are NOT part of periodic_render.
}
</STYLE>"""
- def shuffle_events(self):
+ def shuffle_events(self) -> bool:
        # Build and write the four-item "Stranger Events" page from a subset
        # of previously fetched events. Always returns True (success).
        layout = page_builder.page_builder()
        layout.set_layout(page_builder.page_builder.LAYOUT_FOUR_ITEMS)
        layout.set_title("Stranger Events")
        # NOTE(review): `subset` is not defined in the visible lines -- an
        # elided diff hunk presumably draws a random subset from self.events
        # (a grab_bag). TODO confirm against the full file.
        for msg in subset:
            layout.add_item(msg)
        # The '+' side replaces manual open/close with a context manager so
        # the output file is closed even if render_html raises.
- f = file_writer.file_writer("stranger-events_2_36000.html")
- layout.render_html(f)
- f.close()
+ with file_writer.file_writer("stranger-events_2_36000.html") as f:
+ layout.render_html(f)
        return True
- def fetch_events(self):
+ def fetch_events(self) -> bool:
        # Scrape upcoming-event listings from the EverOut "stranger-seattle"
        # feed into self.events. Returns True iff at least one event was
        # collected. Several diff hunks in this method were elided; review
        # notes below mark the gaps.
        self.events.clear()
        # NOTE(review): the list below is never closed in the visible lines --
        # additional URIs and the terminating "]" sit in an elided hunk.
        feed_uris = [
            "/stranger-seattle/events/?page=1",
        now = datetime.datetime.now()
        # Tomorrow's listings.
        ts = now + datetime.timedelta(1)
        tomorrow = datetime.datetime.strftime(ts, "%Y-%m-%d")
- feed_uris.append("/stranger-seattle/events/?start-date=%s" % tomorrow)
+ feed_uris.append(f"/stranger-seattle/events/?start-date={tomorrow}")
        # weekday(): Monday == 0 ... Sunday == 6, so 5 is Saturday; if today
        # is Saturday or later the delta rolls to next week's Saturday.
        delta = 5 - now.weekday()
        if delta <= 0:
            delta += 7
        # Only add weekend pages when they aren't already covered by the
        # "tomorrow" URI above (delta > 1).
        if delta > 1:
            ts = now + datetime.timedelta(delta)
            next_sat = datetime.datetime.strftime(ts, "%Y-%m-%d")
- feed_uris.append(
- "/stranger-seattle/events/?start-date=%s&page=1" % next_sat
- )
- feed_uris.append(
- "/stranger-seattle/events/?start-date=%s&page=2" % next_sat
- )
+ feed_uris.append(f"/stranger-seattle/events/?start-date={next_sat}&page=1")
+ feed_uris.append(f"/stranger-seattle/events/?start-date={next_sat}&page=2")
        # Saturday + 1 day == Sunday.
        delta += 1
        if delta > 1:
            ts = now + datetime.timedelta(delta)
            next_sun = datetime.datetime.strftime(ts, "%Y-%m-%d")
- feed_uris.append(
- "/stranger-seattle/events/?start-date=%s&page=1" % next_sun
- )
- feed_uris.append(
- "/stranger-seattle/events/?start-date=%s&page=2" % next_sun
- )
+ feed_uris.append(f"/stranger-seattle/events/?start-date={next_sun}&page=1")
+ feed_uris.append(f"/stranger-seattle/events/?start-date={next_sun}&page=2")
+ filter = profanity_filter.profanity_filter()
# NOTE(review): `filter` shadows the builtin of the same name; consider
# renaming (e.g. pf) when this diff is resolved.
        for uri in feed_uris:
            try:
                self.debug_print("fetching 'https://%s%s'" % (self.feed_site, uri))
                # NOTE(review): the actual HTTP fetch (http.client connection,
                # the assignment of `raw`, and the except clause this `try`
                # pairs with) sits in an elided hunk -- `raw` below comes from
                # those missing lines.
                continue
            soup = BeautifulSoup(raw, "html.parser")
- filter = profanity_filter.profanity_filter()
            # Each matching div is one event card in EverOut's list markup.
            for x in soup.find_all("div", class_="row event list-item mb-3 py-3"):
                text = x.get_text()
                # NOTE(review): the body of this branch (presumably a
                # `continue` skipping profane events, plus the reassignment of
                # `raw` to this card's markup) was elided by the diff.
                if filter.contains_bad_words(text):
                raw = raw.replace("FREE", "Free")
                raw = raw.replace("Save Event", "")
                # NOTE(review): non-raw pattern "^\s*$" triggers invalid-
                # escape warnings on modern Python; should be r"^\s*$".
                raw = re.sub("^\s*$", "", raw, 0, re.MULTILINE)
- # raw = re.sub('\n+', '\n', raw)
                # NOTE(review): the string argument of this re.sub sits in an
                # elided hunk (as shown, the flags occupy the string slot).
                # Also "</#span>" looks like a typo for "</span>" -- confirm.
                raw = re.sub(
                    '<span[^<>]*class="calendar-post-ticket"[^<>]*>.*</#span>',
                    "",
                    re.DOTALL | re.IGNORECASE,
                )
                self.events.add(raw)
- self.debug_print("fetched %d events so far." % self.events.size())
+ self.debug_print(f"fetched {self.events.size()} events so far.")
        return self.events.size() > 0