# Tail of the constructor (its signature is above this chunk): register the
# timeout table with the base class, then configure this scraper's source
# site, its event accumulator, and a shared profanity filter.
super().__init__(name_to_timeout_dict)
self.feed_site = "everout.com"
self.events = grab_bag.grab_bag()
# NOTE(review): the '+' below is a leftover unified-diff marker; resolve it
# (keep the line, drop the marker) before this file can parse as Python.
# Building the ProfanityFilter once here, instead of per fetch, is correct.
+ self.pfilter = profanity_filter.ProfanityFilter()
def debug_prefix(self) -> str:
    """Return the tag used to prefix this scraper's debug/log output."""
    return "stranger"
# Refresh self.events with listings scraped from everout.com event pages,
# skipping any listing whose text trips the profanity filter.
# NOTE(review): several lines below still carry unified-diff '+'/'-' markers;
# they must be resolved (keep '+' lines, drop '-' lines) before this parses.
def fetch_events(self) -> bool:
self.events.clear()
- feed_uris = [
- "/seattle/events/?page=1",
- "/seattle/events/?page=2",
- "/seattle/events/?page=3",
- ]
+ feed_uris = []
now = datetime.datetime.now()
# Tomorrow's date, formatted YYYY-MM-DD for the start-date query parameter.
ts = now + datetime.timedelta(1)
tomorrow = datetime.datetime.strftime(ts, "%Y-%m-%d")
# BUG(review): 'next_sun' is never defined in this scope -- only 'tomorrow'
# is computed above, and it is otherwise unused. As written these f-strings
# raise NameError. Presumably 'tomorrow' was intended, or a next-Sunday
# computation was dropped from this hunk. Confirm before merging.
feed_uris.append(f"/seattle/events/?start-date={next_sun}&page=1")
feed_uris.append(f"/seattle/events/?start-date={next_sun}&page=2")
# The per-call filter below is removed in favor of the shared self.pfilter
# built in __init__ (also stops shadowing the 'filter' builtin).
- filter = profanity_filter.ProfanityFilter()
for uri in feed_uris:
try:
logger.debug("fetching 'https://%s%s'" % (self.feed_site, uri))
# NOTE(review): 'raw' is not assigned anywhere in the visible code -- the
# HTTP fetch that should populate the page body appears to be missing
# between the log line and this parse. Verify against the full file.
soup = BeautifulSoup(raw, "html.parser")
for x in soup.find_all("div", class_="row event list-item mb-3 py-3"):
text = x.get_text()
# Drop listings containing profanity before storing them.
- if filter.contains_bad_word(text):
+ if self.pfilter.contains_bad_word(text):
continue
raw_str = str(x)
raw_str = raw_str.replace(