from bs4 import BeautifulSoup
import datetime
import httplib
import re

import file_writer
import grab_bag
import page_builder
import profanity_filter
import renderer


class stranger_events_renderer(renderer.debuggable_abstaining_renderer):
    """Scrapes event "picks" from The Stranger (everout.thestranger.com),
    scrubs them, and renders a rotating four-item subset to an HTML page.
    """

    def __init__(self, name_to_timeout_dict):
        super(stranger_events_renderer, self).__init__(name_to_timeout_dict, True)
        self.feed_site = "everout.thestranger.com"
        # Pool of already-fetched event HTML snippets to sample from.
        self.events = grab_bag.grab_bag()

    def debug_prefix(self):
        return "stranger"

    def periodic_render(self, key):
        """Dispatch one named periodic operation; returns True on success."""
        self.debug_print("called for action %s" % key)
        if key == "Fetch Events":
            return self.fetch_events()
        elif key == "Shuffle Events":
            return self.shuffle_events()
        else:
            # BUGFIX: original raised the undefined name 'error' (NameError)
            # and misspelled "operation"; raise a real exception with context.
            raise Exception("Unknown operation: %s" % key)

    def get_style(self):
        # No page-specific CSS; hook kept for symmetry with sibling renderers.
        return """
"""

    def shuffle_events(self):
        """Render four randomly-chosen cached events to the output page.

        Returns False (writing nothing) when fewer than four events are
        cached; True after a successful render.
        """
        layout = page_builder.page_builder()
        layout.set_layout(page_builder.page_builder.LAYOUT_FOUR_ITEMS)
        layout.set_title("Stranger Events")
        layout.set_style(self.get_style())
        subset = self.events.subset(4)
        if subset is None:
            self.debug_print("Not enough events to build page.")
            return False
        for msg in subset:
            layout.add_item(msg)
        f = file_writer.file_writer('stranger-events_2_none.html')
        try:
            layout.render_html(f)
        finally:
            # BUGFIX: close the writer even if render_html raises.
            f.close()
        return True

    def fetch_events(self):
        """Scrape event picks for today, tomorrow and the next weekend.

        Replaces the cached event pool.  Returns True iff at least one
        event was fetched and survived the profanity filter.
        """
        self.events.clear()

        # Today's picks, first three pages.
        feed_uris = [
            "/events/?page=1&picks=true",
            "/events/?page=2&picks=true",
            "/events/?page=3&picks=true",
        ]

        # Tomorrow's picks.
        now = datetime.datetime.now()
        ts = now + datetime.timedelta(days=1)
        tomorrow = datetime.datetime.strftime(ts, "%Y-%m-%d")
        feed_uris.append("/events/?start-date=%s&picks=true" % tomorrow)

        # Next Saturday and Sunday -- skipped when they fall on today or
        # tomorrow, which the URIs above already cover.
        delta = 5 - now.weekday()      # days until Saturday (weekday 5)
        if delta <= 0:
            delta += 7
        if delta > 1:
            ts = now + datetime.timedelta(days=delta)
            next_sat = datetime.datetime.strftime(ts, "%Y-%m-%d")
            feed_uris.append("/events/?start-date=%s&page=1&picks=true" % next_sat)
            feed_uris.append("/events/?start-date=%s&page=2&picks=true" % next_sat)
        delta += 1
        if delta > 1:
            ts = now + datetime.timedelta(days=delta)
            next_sun = datetime.datetime.strftime(ts, "%Y-%m-%d")
            feed_uris.append("/events/?start-date=%s&page=1&picks=true" % next_sun)
            feed_uris.append("/events/?start-date=%s&page=2&picks=true" % next_sun)

        # Hoisted out of the loop: the filter is stateless across pages.
        # (Renamed from 'filter', which shadowed the builtin.)
        bad_word_filter = profanity_filter.profanity_filter()

        for uri in feed_uris:
            self.debug_print("fetching '%s'" % uri)
            self.conn = httplib.HTTPSConnection(self.feed_site)
            self.conn.request(
                "GET",
                uri,
                None,
                {"Accept-Charset": "utf-8"})
            response = self.conn.getresponse()
            if response.status != 200:
                print("stranger: Failed, status %d" % (response.status))
                continue
            raw = response.read()
            soup = BeautifulSoup(raw, "html.parser")

            # Each event pick is a <div class="row event list-item mb-3 py-3">
            # containing the title link, date/time and venue columns.
            for x in soup.find_all('div', class_='row event list-item mb-3 py-3'):
                text = x.get_text()
                if bad_word_filter.contains_bad_words(text):
                    continue
                raw = unicode(x)
                # Absolutize relative image/link URLs so they resolve off-site.
                raw = raw.replace('src="/', 'align="left" src="https://www.thestranger.com/')
                raw = raw.replace('href="/', 'href="https://www.thestranger.com/')
                raw = raw.replace('FREE', 'Free')
                raw = raw.replace('Save Event', '')
                # Collapse blank lines left behind by the replacements.
                raw = re.sub(r'^\s*$', '', raw, 0, re.MULTILINE)
                raw = re.sub(r'\n+', '\n', raw)
                # Strip the trailing "calendar-post-ticket" block.
                # NOTE(review): the pattern starts with ']*', which looks like
                # a garbled tag prefix (perhaps '<[^<>]*class=...'); preserved
                # as-is -- confirm against the live markup before changing.
                raw = re.sub(r']*class="calendar-post-ticket"[^<>]*>.*',
                             '', raw, 0, re.DOTALL | re.IGNORECASE)
                self.events.add(raw.encode('utf-8'))

        self.debug_print("fetched %d events so far." % self.events.size())
        return self.events.size() > 0


# Smoke test.
if __name__ == "__main__":
    # BUGFIX: the original passed the set literal {"Test", 123}; the base
    # class expects a name -> timeout mapping.  Also guarded with
    # __name__ so importing this module no longer hits the network.
    x = stranger_events_renderer({"Test": 123})
    x.periodic_render("Fetch Events")
    x.periodic_render("Shuffle Events")