--- /dev/null
+#!/usr/bin/env python3
+
+import difflib
+import logging
+import sys
+from typing import Dict, Optional, Tuple
+
+import string_utils
+
+logger = logging.getLogger(__name__)
+
+# https://en.wikipedia.org/wiki/ANSI_escape_code
+
+
+COLOR_NAMES_TO_RGB: Dict[str, Tuple[int, int, int]] = {
+ "wannabe.house": (0x00, 0x00, 95),
+ "cheetah.house": (95, 0x00, 0x00),
+ "meerkat.cabin": (95, 0x00, 95),
+ "backup.house": (175, 95, 0),
+ "kiosk.house": (90, 95, 0),
+ "rpi": (208, 95, 0),
+ "black": (0x00, 0x00, 0x00),
+ "navy blue": (0x00, 0x00, 0x80),
+ "dark blue": (0x00, 0x00, 0xC8),
+ "blue": (0x00, 0x00, 0xFF),
+ "stratos": (0x00, 0x07, 0x41),
+ "swamp": (0x00, 0x1B, 0x1C),
+ "resolution blue": (0x00, 0x23, 0x87),
+ "deep fir": (0x00, 0x29, 0x00),
+ "burnham": (0x00, 0x2E, 0x20),
+ "klein blue": (0x00, 0x2F, 0xA7),
+ "prussian blue": (0x00, 0x31, 0x53),
+ "midnight blue": (0x00, 0x33, 0x66),
+ "smalt": (0x00, 0x33, 0x99),
+ "deep teal": (0x00, 0x35, 0x32),
+ "cyprus": (0x00, 0x3E, 0x40),
+ "kaitoke green": (0x00, 0x46, 0x20),
+ "cobalt": (0x00, 0x47, 0xAB),
+ "crusoe": (0x00, 0x48, 0x16),
+ "sherpa blue": (0x00, 0x49, 0x50),
+ "endeavour": (0x00, 0x56, 0xA7),
+ "camarone": (0x00, 0x58, 0x1A),
+ "science blue": (0x00, 0x66, 0xCC),
+ "blue ribbon": (0x00, 0x66, 0xFF),
+ "tropical rain forest": (0x00, 0x75, 0x5E),
+ "allports": (0x00, 0x76, 0xA3),
+ "deep cerulean": (0x00, 0x7B, 0xA7),
+ "lochmara": (0x00, 0x7E, 0xC7),
+ "azure radiance": (0x00, 0x7F, 0xFF),
+ "teal": (0x00, 0x80, 0x80),
+ "bondi blue": (0x00, 0x95, 0xB6),
+ "pacific blue": (0x00, 0x9D, 0xC4),
+ "persian green": (0x00, 0xA6, 0x93),
+ "jade": (0x00, 0xA8, 0x6B),
+ "caribbean green": (0x00, 0xCC, 0x99),
+ "robin's egg blue": (0x00, 0xCC, 0xCC),
+ "green": (0x00, 0xFF, 0x00),
+ "spring green": (0x00, 0xFF, 0x7F),
+ "cyan": (0x00, 0xFF, 0xFF),
+ "aqua": (0x00, 0xFF, 0xFF),
+ "blue charcoal": (0x01, 0x0D, 0x1A),
+ "midnight": (0x01, 0x16, 0x35),
+ "holly": (0x01, 0x1D, 0x13),
+ "daintree": (0x01, 0x27, 0x31),
+ "cardin green": (0x01, 0x36, 0x1C),
+ "county green": (0x01, 0x37, 0x1A),
+ "astronaut blue": (0x01, 0x3E, 0x62),
+ "regal blue": (0x01, 0x3F, 0x6A),
+ "aqua deep": (0x01, 0x4B, 0x43),
+ "orient": (0x01, 0x5E, 0x85),
+ "blue stone": (0x01, 0x61, 0x62),
+ "fun green": (0x01, 0x6D, 0x39),
+ "pine green": (0x01, 0x79, 0x6F),
+ "blue lagoon": (0x01, 0x79, 0x87),
+ "deep sea": (0x01, 0x82, 0x6B),
+ "green haze": (0x01, 0xA3, 0x68),
+ "english holly": (0x02, 0x2D, 0x15),
+ "sherwood green": (0x02, 0x40, 0x2C),
+ "congress blue": (0x02, 0x47, 0x8E),
+ "evening sea": (0x02, 0x4E, 0x46),
+ "bahama blue": (0x02, 0x63, 0x95),
+ "observatory": (0x02, 0x86, 0x6F),
+ "cerulean": (0x02, 0xA4, 0xD3),
+ "tangaroa": (0x03, 0x16, 0x3C),
+ "green vogue": (0x03, 0x2B, 0x52),
+ "mosque": (0x03, 0x6A, 0x6E),
+ "midnight moss": (0x04, 0x10, 0x04),
+ "black pearl": (0x04, 0x13, 0x22),
+ "blue whale": (0x04, 0x2E, 0x4C),
+ "zuccini": (0x04, 0x40, 0x22),
+ "teal blue": (0x04, 0x42, 0x59),
+ "deep cove": (0x05, 0x10, 0x40),
+ "gulf blue": (0x05, 0x16, 0x57),
+ "venice blue": (0x05, 0x59, 0x89),
+ "watercourse": (0x05, 0x6F, 0x57),
+ "catalina blue": (0x06, 0x2A, 0x78),
+ "tiber": (0x06, 0x35, 0x37),
+ "gossamer": (0x06, 0x9B, 0x81),
+ "niagara": (0x06, 0xA1, 0x89),
+ "tarawera": (0x07, 0x3A, 0x50),
+ "jaguar": (0x08, 0x01, 0x10),
+ "black bean": (0x08, 0x19, 0x10),
+ "deep sapphire": (0x08, 0x25, 0x67),
+ "elf green": (0x08, 0x83, 0x70),
+ "bright turquoise": (0x08, 0xE8, 0xDE),
+ "downriver": (0x09, 0x22, 0x56),
+ "palm green": (0x09, 0x23, 0x0F),
+ "madison": (0x09, 0x25, 0x5D),
+ "bottle green": (0x09, 0x36, 0x24),
+ "deep sea green": (0x09, 0x58, 0x59),
+ "salem": (0x09, 0x7F, 0x4B),
+ "black russian": (0x0A, 0x00, 0x1C),
+ "dark fern": (0x0A, 0x48, 0x0D),
+ "japanese laurel": (0x0A, 0x69, 0x06),
+ "atoll": (0x0A, 0x6F, 0x75),
+ "cod gray": (0x0B, 0x0B, 0x0B),
+ "marshland": (0x0B, 0x0F, 0x08),
+ "gordons green": (0x0B, 0x11, 0x07),
+ "black forest": (0x0B, 0x13, 0x04),
+ "san felix": (0x0B, 0x62, 0x07),
+ "malachite": (0x0B, 0xDA, 0x51),
+ "ebony": (0x0C, 0x0B, 0x1D),
+ "woodsmoke": (0x0C, 0x0D, 0x0F),
+ "racing green": (0x0C, 0x19, 0x11),
+ "surfie green": (0x0C, 0x7A, 0x79),
+ "blue chill": (0x0C, 0x89, 0x90),
+ "black rock": (0x0D, 0x03, 0x32),
+ "bunker": (0x0D, 0x11, 0x17),
+ "aztec": (0x0D, 0x1C, 0x19),
+ "bush": (0x0D, 0x2E, 0x1C),
+ "cinder": (0x0E, 0x0E, 0x18),
+ "firefly": (0x0E, 0x2A, 0x30),
+ "torea bay": (0x0F, 0x2D, 0x9E),
+ "vulcan": (0x10, 0x12, 0x1D),
+ "green waterloo": (0x10, 0x14, 0x05),
+ "eden": (0x10, 0x58, 0x52),
+ "arapawa": (0x11, 0x0C, 0x6C),
+ "ultramarine": (0x12, 0x0A, 0x8F),
+ "elephant": (0x12, 0x34, 0x47),
+ "jewel": (0x12, 0x6B, 0x40),
+ "diesel": (0x13, 0x00, 0x00),
+ "asphalt": (0x13, 0x0A, 0x06),
+ "blue zodiac": (0x13, 0x26, 0x4D),
+ "parsley": (0x13, 0x4F, 0x19),
+ "nero": (0x14, 0x06, 0x00),
+ "tory blue": (0x14, 0x50, 0xAA),
+ "bunting": (0x15, 0x1F, 0x4C),
+ "denim": (0x15, 0x60, 0xBD),
+ "genoa": (0x15, 0x73, 0x6B),
+ "mirage": (0x16, 0x19, 0x28),
+ "hunter green": (0x16, 0x1D, 0x10),
+ "big stone": (0x16, 0x2A, 0x40),
+ "celtic": (0x16, 0x32, 0x22),
+ "timber green": (0x16, 0x32, 0x2C),
+ "gable green": (0x16, 0x35, 0x31),
+ "pine tree": (0x17, 0x1F, 0x04),
+ "chathams blue": (0x17, 0x55, 0x79),
+ "deep forest green": (0x18, 0x2D, 0x09),
+ "dark green": (0x18, 0x2D, 0x09),
+ "blumine": (0x18, 0x58, 0x7A),
+ "palm leaf": (0x19, 0x33, 0x0E),
+ "nile blue": (0x19, 0x37, 0x51),
+ "fun blue": (0x19, 0x59, 0xA8),
+ "lucky point": (0x1A, 0x1A, 0x68),
+ "mountain meadow": (0x1A, 0xB3, 0x85),
+ "tolopea": (0x1B, 0x02, 0x45),
+ "haiti": (0x1B, 0x10, 0x35),
+ "deep koamaru": (0x1B, 0x12, 0x7B),
+ "acadia": (0x1B, 0x14, 0x04),
+ "seaweed": (0x1B, 0x2F, 0x11),
+ "biscay": (0x1B, 0x31, 0x62),
+ "matisse": (0x1B, 0x65, 0x9D),
+ "crowshead": (0x1C, 0x12, 0x08),
+ "rangoon green": (0x1C, 0x1E, 0x13),
+ "persian blue": (0x1C, 0x39, 0xBB),
+ "everglade": (0x1C, 0x40, 0x2E),
+ "elm": (0x1C, 0x7C, 0x7D),
+ "green pea": (0x1D, 0x61, 0x42),
+ "creole": (0x1E, 0x0F, 0x04),
+ "karaka": (0x1E, 0x16, 0x09),
+ "el paso": (0x1E, 0x17, 0x08),
+ "cello": (0x1E, 0x38, 0x5B),
+ "te papa green": (0x1E, 0x43, 0x3C),
+ "dodger blue": (0x1E, 0x90, 0xFF),
+ "eastern blue": (0x1E, 0x9A, 0xB0),
+ "night rider": (0x1F, 0x12, 0x0F),
+ "java": (0x1F, 0xC2, 0xC2),
+ "jacksons purple": (0x20, 0x20, 0x8D),
+ "cloud burst": (0x20, 0x2E, 0x54),
+ "blue dianne": (0x20, 0x48, 0x52),
+ "eternity": (0x21, 0x1A, 0x0E),
+ "deep blue": (0x22, 0x08, 0x78),
+ "forest green": (0x22, 0x8B, 0x22),
+ "mallard": (0x23, 0x34, 0x18),
+ "violet": (0x24, 0x0A, 0x40),
+ "kilamanjaro": (0x24, 0x0C, 0x02),
+ "log cabin": (0x24, 0x2A, 0x1D),
+ "black olive": (0x24, 0x2E, 0x16),
+ "green house": (0x24, 0x50, 0x0F),
+ "graphite": (0x25, 0x16, 0x07),
+ "cannon black": (0x25, 0x17, 0x06),
+ "port gore": (0x25, 0x1F, 0x4F),
+ "shark": (0x25, 0x27, 0x2C),
+ "green kelp": (0x25, 0x31, 0x1C),
+ "curious blue": (0x25, 0x96, 0xD1),
+ "paua": (0x26, 0x03, 0x68),
+ "paris m": (0x26, 0x05, 0x6A),
+ "wood bark": (0x26, 0x11, 0x05),
+ "gondola": (0x26, 0x14, 0x14),
+ "steel gray": (0x26, 0x23, 0x35),
+ "light gray": (0x26, 0x23, 0x35),
+ "ebony clay": (0x26, 0x28, 0x3B),
+ "bay of many": (0x27, 0x3A, 0x81),
+ "plantation": (0x27, 0x50, 0x4B),
+ "eucalyptus": (0x27, 0x8A, 0x5B),
+ "oil": (0x28, 0x1E, 0x15),
+ "astronaut": (0x28, 0x3A, 0x77),
+ "mariner": (0x28, 0x6A, 0xCD),
+ "violent violet": (0x29, 0x0C, 0x5E),
+ "bastille": (0x29, 0x21, 0x30),
+ "zeus": (0x29, 0x23, 0x19),
+ "charade": (0x29, 0x29, 0x37),
+ "jelly bean": (0x29, 0x7B, 0x9A),
+ "jungle green": (0x29, 0xAB, 0x87),
+ "cherry pie": (0x2A, 0x03, 0x59),
+ "coffee bean": (0x2A, 0x14, 0x0E),
+ "baltic sea": (0x2A, 0x26, 0x30),
+ "turtle green": (0x2A, 0x38, 0x0B),
+ "cerulean blue": (0x2A, 0x52, 0xBE),
+ "sepia black": (0x2B, 0x02, 0x02),
+ "valhalla": (0x2B, 0x19, 0x4F),
+ "heavy metal": (0x2B, 0x32, 0x28),
+ "blue gem": (0x2C, 0x0E, 0x8C),
+ "revolver": (0x2C, 0x16, 0x32),
+ "bleached cedar": (0x2C, 0x21, 0x33),
+ "lochinvar": (0x2C, 0x8C, 0x84),
+ "mikado": (0x2D, 0x25, 0x10),
+ "outer space": (0x2D, 0x38, 0x3A),
+ "st tropaz": (0x2D, 0x56, 0x9B),
+ "jacaranda": (0x2E, 0x03, 0x29),
+ "jacko bean": (0x2E, 0x19, 0x05),
+ "rangitoto": (0x2E, 0x32, 0x22),
+ "rhino": (0x2E, 0x3F, 0x62),
+ "sea green": (0x2E, 0x8B, 0x57),
+ "scooter": (0x2E, 0xBF, 0xD4),
+ "onion": (0x2F, 0x27, 0x0E),
+ "governor bay": (0x2F, 0x3C, 0xB3),
+ "sapphire": (0x2F, 0x51, 0x9E),
+ "spectra": (0x2F, 0x5A, 0x57),
+ "casal": (0x2F, 0x61, 0x68),
+ "melanzane": (0x30, 0x05, 0x29),
+ "cocoa brown": (0x30, 0x1F, 0x1E),
+ "woodrush": (0x30, 0x2A, 0x0F),
+ "san juan": (0x30, 0x4B, 0x6A),
+ "turquoise": (0x30, 0xD5, 0xC8),
+ "eclipse": (0x31, 0x1C, 0x17),
+ "pickled bluewood": (0x31, 0x44, 0x59),
+ "azure": (0x31, 0x5B, 0xA1),
+ "calypso": (0x31, 0x72, 0x8D),
+ "paradiso": (0x31, 0x7D, 0x82),
+ "persian indigo": (0x32, 0x12, 0x7A),
+ "blackcurrant": (0x32, 0x29, 0x3A),
+ "mine shaft": (0x32, 0x32, 0x32),
+ "stromboli": (0x32, 0x5D, 0x52),
+ "bilbao": (0x32, 0x7C, 0x14),
+ "astral": (0x32, 0x7D, 0xA0),
+ "christalle": (0x33, 0x03, 0x6B),
+ "thunder": (0x33, 0x29, 0x2F),
+ "shamrock": (0x33, 0xCC, 0x99),
+ "tamarind": (0x34, 0x15, 0x15),
+ "mardi gras": (0x35, 0x00, 0x36),
+ "valentino": (0x35, 0x0E, 0x42),
+ "jagger": (0x35, 0x0E, 0x57),
+ "tuna": (0x35, 0x35, 0x42),
+ "chambray": (0x35, 0x4E, 0x8C),
+ "martinique": (0x36, 0x30, 0x50),
+ "tuatara": (0x36, 0x35, 0x34),
+ "waiouru": (0x36, 0x3C, 0x0D),
+ "ming": (0x36, 0x74, 0x7D),
+ "la palma": (0x36, 0x87, 0x16),
+ "chocolate": (0x37, 0x02, 0x02),
+ "clinker": (0x37, 0x1D, 0x09),
+ "brown tumbleweed": (0x37, 0x29, 0x0E),
+ "birch": (0x37, 0x30, 0x21),
+ "oracle": (0x37, 0x74, 0x75),
+ "blue diamond": (0x38, 0x04, 0x74),
+ "grape": (0x38, 0x1A, 0x51),
+ "dune": (0x38, 0x35, 0x33),
+ "oxford blue": (0x38, 0x45, 0x55),
+ "clover": (0x38, 0x49, 0x10),
+ "limed spruce": (0x39, 0x48, 0x51),
+ "dell": (0x39, 0x64, 0x13),
+ "toledo": (0x3A, 0x00, 0x20),
+ "sambuca": (0x3A, 0x20, 0x10),
+ "jacarta": (0x3A, 0x2A, 0x6A),
+ "william": (0x3A, 0x68, 0x6C),
+ "killarney": (0x3A, 0x6A, 0x47),
+ "keppel": (0x3A, 0xB0, 0x9E),
+ "temptress": (0x3B, 0x00, 0x0B),
+ "aubergine": (0x3B, 0x09, 0x10),
+ "jon": (0x3B, 0x1F, 0x1F),
+ "treehouse": (0x3B, 0x28, 0x20),
+ "amazon": (0x3B, 0x7A, 0x57),
+ "boston blue": (0x3B, 0x91, 0xB4),
+ "windsor": (0x3C, 0x08, 0x78),
+ "rebel": (0x3C, 0x12, 0x06),
+ "meteorite": (0x3C, 0x1F, 0x76),
+ "dark ebony": (0x3C, 0x20, 0x05),
+ "camouflage": (0x3C, 0x39, 0x10),
+ "bright gray": (0x3C, 0x41, 0x51),
+ "cape cod": (0x3C, 0x44, 0x43),
+ "lunar green": (0x3C, 0x49, 0x3A),
+ "bean ": (0x3D, 0x0C, 0x02),
+ "bistre": (0x3D, 0x2B, 0x1F),
+ "goblin": (0x3D, 0x7D, 0x52),
+ "kingfisher daisy": (0x3E, 0x04, 0x80),
+ "cedar": (0x3E, 0x1C, 0x14),
+ "english walnut": (0x3E, 0x2B, 0x23),
+ "black marlin": (0x3E, 0x2C, 0x1C),
+ "ship gray": (0x3E, 0x3A, 0x44),
+ "pelorous": (0x3E, 0xAB, 0xBF),
+ "bronze": (0x3F, 0x21, 0x09),
+ "cola": (0x3F, 0x25, 0x00),
+ "madras": (0x3F, 0x30, 0x02),
+ "minsk": (0x3F, 0x30, 0x7F),
+ "cabbage pont": (0x3F, 0x4C, 0x3A),
+ "tom thumb": (0x3F, 0x58, 0x3B),
+ "mineral green": (0x3F, 0x5D, 0x53),
+ "puerto rico": (0x3F, 0xC1, 0xAA),
+ "harlequin": (0x3F, 0xFF, 0x00),
+ "brown pod": (0x40, 0x18, 0x01),
+ "cork": (0x40, 0x29, 0x1D),
+ "masala": (0x40, 0x3B, 0x38),
+ "thatch green": (0x40, 0x3D, 0x19),
+ "fiord": (0x40, 0x51, 0x69),
+ "viridian": (0x40, 0x82, 0x6D),
+ "chateau green": (0x40, 0xA8, 0x60),
+ "ripe plum": (0x41, 0x00, 0x56),
+ "paco": (0x41, 0x1F, 0x10),
+ "deep oak": (0x41, 0x20, 0x10),
+ "merlin": (0x41, 0x3C, 0x37),
+ "gun powder": (0x41, 0x42, 0x57),
+ "east bay": (0x41, 0x4C, 0x7D),
+ "royal blue": (0x41, 0x69, 0xE1),
+ "ocean green": (0x41, 0xAA, 0x78),
+ "burnt maroon": (0x42, 0x03, 0x03),
+ "lisbon brown": (0x42, 0x39, 0x21),
+ "faded jade": (0x42, 0x79, 0x77),
+ "scarlet gum": (0x43, 0x15, 0x60),
+ "iroko": (0x43, 0x31, 0x20),
+ "armadillo": (0x43, 0x3E, 0x37),
+ "river bed": (0x43, 0x4C, 0x59),
+ "green leaf": (0x43, 0x6A, 0x0D),
+ "barossa": (0x44, 0x01, 0x2D),
+ "morocco brown": (0x44, 0x1D, 0x00),
+ "mako": (0x44, 0x49, 0x54),
+ "kelp": (0x45, 0x49, 0x36),
+ "san marino": (0x45, 0x6C, 0xAC),
+ "picton blue": (0x45, 0xB1, 0xE8),
+ "loulou": (0x46, 0x0B, 0x41),
+ "crater brown": (0x46, 0x24, 0x25),
+ "gray asparagus": (0x46, 0x59, 0x45),
+ "steel blue": (0x46, 0x82, 0xB4),
+ "rustic red": (0x48, 0x04, 0x04),
+ "bulgarian rose": (0x48, 0x06, 0x07),
+ "clairvoyant": (0x48, 0x06, 0x56),
+ "cocoa bean": (0x48, 0x1C, 0x1C),
+ "woody brown": (0x48, 0x31, 0x31),
+ "taupe": (0x48, 0x3C, 0x32),
+ "van cleef": (0x49, 0x17, 0x0C),
+ "brown derby": (0x49, 0x26, 0x15),
+ "metallic bronze": (0x49, 0x37, 0x1B),
+ "verdun green": (0x49, 0x54, 0x00),
+ "blue bayoux": (0x49, 0x66, 0x79),
+ "bismark": (0x49, 0x71, 0x83),
+ "bracken": (0x4A, 0x2A, 0x04),
+ "deep bronze": (0x4A, 0x30, 0x04),
+ "mondo": (0x4A, 0x3C, 0x30),
+ "tundora": (0x4A, 0x42, 0x44),
+ "gravel": (0x4A, 0x44, 0x4B),
+ "trout": (0x4A, 0x4E, 0x5A),
+ "pigment indigo": (0x4B, 0x00, 0x82),
+ "nandor": (0x4B, 0x5D, 0x52),
+ "saddle": (0x4C, 0x30, 0x24),
+ "abbey": (0x4C, 0x4F, 0x56),
+ "blackberry": (0x4D, 0x01, 0x35),
+ "cab sav": (0x4D, 0x0A, 0x18),
+ "indian tan": (0x4D, 0x1E, 0x01),
+ "cowboy": (0x4D, 0x28, 0x2D),
+ "livid brown": (0x4D, 0x28, 0x2E),
+ "rock": (0x4D, 0x38, 0x33),
+ "punga": (0x4D, 0x3D, 0x14),
+ "bronzetone": (0x4D, 0x40, 0x0F),
+ "woodland": (0x4D, 0x53, 0x28),
+ "mahogany": (0x4E, 0x06, 0x06),
+ "bossanova": (0x4E, 0x2A, 0x5A),
+ "matterhorn": (0x4E, 0x3B, 0x41),
+ "bronze olive": (0x4E, 0x42, 0x0C),
+ "mulled wine": (0x4E, 0x45, 0x62),
+ "axolotl": (0x4E, 0x66, 0x49),
+ "wedgewood": (0x4E, 0x7F, 0x9E),
+ "shakespeare": (0x4E, 0xAB, 0xD1),
+ "honey flower": (0x4F, 0x1C, 0x70),
+ "daisy bush": (0x4F, 0x23, 0x98),
+ "indigo": (0x4F, 0x69, 0xC6),
+ "fern green": (0x4F, 0x79, 0x42),
+ "fruit salad": (0x4F, 0x9D, 0x5D),
+ "apple": (0x4F, 0xA8, 0x3D),
+ "mortar": (0x50, 0x43, 0x51),
+ "kashmir blue": (0x50, 0x70, 0x96),
+ "cutty sark": (0x50, 0x76, 0x72),
+ "emerald": (0x50, 0xC8, 0x78),
+ "emperor": (0x51, 0x46, 0x49),
+ "chalet green": (0x51, 0x6E, 0x3D),
+ "como": (0x51, 0x7C, 0x66),
+ "smalt blue": (0x51, 0x80, 0x8F),
+ "castro": (0x52, 0x00, 0x1F),
+ "maroon oak": (0x52, 0x0C, 0x17),
+ "gigas": (0x52, 0x3C, 0x94),
+ "voodoo": (0x53, 0x34, 0x55),
+ "victoria": (0x53, 0x44, 0x91),
+ "hippie green": (0x53, 0x82, 0x4B),
+ "heath": (0x54, 0x10, 0x12),
+ "judge gray": (0x54, 0x43, 0x33),
+ "fuscous gray": (0x54, 0x53, 0x4D),
+ "vida loca": (0x54, 0x90, 0x19),
+ "cioccolato": (0x55, 0x28, 0x0C),
+ "saratoga": (0x55, 0x5B, 0x10),
+ "finlandia": (0x55, 0x6D, 0x56),
+ "havelock blue": (0x55, 0x90, 0xD9),
+ "fountain blue": (0x56, 0xB4, 0xBE),
+ "spring leaves": (0x57, 0x83, 0x63),
+ "saddle brown": (0x58, 0x34, 0x01),
+ "scarpa flow": (0x58, 0x55, 0x62),
+ "cactus": (0x58, 0x71, 0x56),
+ "hippie blue": (0x58, 0x9A, 0xAF),
+ "wine berry": (0x59, 0x1D, 0x35),
+ "brown bramble": (0x59, 0x28, 0x04),
+ "congo brown": (0x59, 0x37, 0x37),
+ "millbrook": (0x59, 0x44, 0x33),
+ "waikawa gray": (0x5A, 0x6E, 0x9C),
+ "horizon": (0x5A, 0x87, 0xA0),
+ "jambalaya": (0x5B, 0x30, 0x13),
+ "bordeaux": (0x5C, 0x01, 0x20),
+ "mulberry wood": (0x5C, 0x05, 0x36),
+ "carnaby tan": (0x5C, 0x2E, 0x01),
+ "comet": (0x5C, 0x5D, 0x75),
+ "redwood": (0x5D, 0x1E, 0x0F),
+ "don juan": (0x5D, 0x4C, 0x51),
+ "chicago": (0x5D, 0x5C, 0x58),
+ "verdigris": (0x5D, 0x5E, 0x37),
+ "dingley": (0x5D, 0x77, 0x47),
+ "breaker bay": (0x5D, 0xA1, 0x9F),
+ "kabul": (0x5E, 0x48, 0x3E),
+ "hemlock": (0x5E, 0x5D, 0x3B),
+ "irish coffee": (0x5F, 0x3D, 0x26),
+ "mid gray": (0x5F, 0x5F, 0x6E),
+ "shuttle gray": (0x5F, 0x66, 0x72),
+ "aqua forest": (0x5F, 0xA7, 0x77),
+ "tradewind": (0x5F, 0xB3, 0xAC),
+ "horses neck": (0x60, 0x49, 0x13),
+ "smoky": (0x60, 0x5B, 0x73),
+ "corduroy": (0x60, 0x6E, 0x68),
+ "danube": (0x60, 0x93, 0xD1),
+ "espresso": (0x61, 0x27, 0x18),
+ "eggplant": (0x61, 0x40, 0x51),
+ "costa del sol": (0x61, 0x5D, 0x30),
+ "glade green": (0x61, 0x84, 0x5F),
+ "buccaneer": (0x62, 0x2F, 0x30),
+ "quincy": (0x62, 0x3F, 0x2D),
+ "butterfly bush": (0x62, 0x4E, 0x9A),
+ "west coast": (0x62, 0x51, 0x19),
+ "finch": (0x62, 0x66, 0x49),
+ "patina": (0x63, 0x9A, 0x8F),
+ "fern": (0x63, 0xB7, 0x6C),
+ "blue violet": (0x64, 0x56, 0xB7),
+ "dolphin": (0x64, 0x60, 0x77),
+ "storm dust": (0x64, 0x64, 0x63),
+ "siam": (0x64, 0x6A, 0x54),
+ "nevada": (0x64, 0x6E, 0x75),
+ "cornflower blue": (0x64, 0x95, 0xED),
+ "viking": (0x64, 0xCC, 0xDB),
+ "rosewood": (0x65, 0x00, 0x0B),
+ "cherrywood": (0x65, 0x1A, 0x14),
+ "purple heart": (0x65, 0x2D, 0xC1),
+ "fern frond": (0x65, 0x72, 0x20),
+ "willow grove": (0x65, 0x74, 0x5D),
+ "hoki": (0x65, 0x86, 0x9F),
+ "pompadour": (0x66, 0x00, 0x45),
+ "purple": (0x66, 0x00, 0x99),
+ "dark purple": (0x36, 0x00, 0x79),
+ "tyrian purple": (0x66, 0x02, 0x3C),
+ "dark tan": (0x66, 0x10, 0x10),
+ "silver tree": (0x66, 0xB5, 0x8F),
+ "bright green": (0x66, 0xFF, 0x00),
+ "screamin' green": (0x66, 0xFF, 0x66),
+ "black rose": (0x67, 0x03, 0x2D),
+ "scampi": (0x67, 0x5F, 0xA6),
+ "ironside gray": (0x67, 0x66, 0x62),
+ "viridian green": (0x67, 0x89, 0x75),
+ "christi": (0x67, 0xA7, 0x12),
+ "nutmeg wood finish": (0x68, 0x36, 0x00),
+ "zambezi": (0x68, 0x55, 0x58),
+ "salt box": (0x68, 0x5E, 0x6E),
+ "tawny port": (0x69, 0x25, 0x45),
+ "finn": (0x69, 0x2D, 0x54),
+ "scorpion": (0x69, 0x5F, 0x62),
+ "lynch": (0x69, 0x7E, 0x9A),
+ "spice": (0x6A, 0x44, 0x2E),
+ "himalaya": (0x6A, 0x5D, 0x1B),
+ "soya bean": (0x6A, 0x60, 0x51),
+ "hairy heath": (0x6B, 0x2A, 0x14),
+ "royal purple": (0x6B, 0x3F, 0xA0),
+ "shingle fawn": (0x6B, 0x4E, 0x31),
+ "dorado": (0x6B, 0x57, 0x55),
+ "bermuda gray": (0x6B, 0x8B, 0xA2),
+ "olive drab": (0x6B, 0x8E, 0x23),
+ "eminence": (0x6C, 0x30, 0x82),
+ "turquoise blue": (0x6C, 0xDA, 0xE7),
+ "lonestar": (0x6D, 0x01, 0x01),
+ "pine cone": (0x6D, 0x5E, 0x54),
+ "dove gray": (0x6D, 0x6C, 0x6C),
+ "juniper": (0x6D, 0x92, 0x92),
+ "gothic": (0x6D, 0x92, 0xA1),
+ "red oxide": (0x6E, 0x09, 0x02),
+ "moccaccino": (0x6E, 0x1D, 0x14),
+ "pickled bean": (0x6E, 0x48, 0x26),
+ "dallas": (0x6E, 0x4B, 0x26),
+ "kokoda": (0x6E, 0x6D, 0x57),
+ "pale sky": (0x6E, 0x77, 0x83),
+ "cafe royale": (0x6F, 0x44, 0x0C),
+ "flint": (0x6F, 0x6A, 0x61),
+ "highland": (0x6F, 0x8E, 0x63),
+ "limeade": (0x6F, 0x9D, 0x02),
+ "downy": (0x6F, 0xD0, 0xC5),
+ "persian plum": (0x70, 0x1C, 0x1C),
+ "sepia": (0x70, 0x42, 0x14),
+ "antique bronze": (0x70, 0x4A, 0x07),
+ "ferra": (0x70, 0x4F, 0x50),
+ "coffee": (0x70, 0x65, 0x55),
+ "slate gray": (0x70, 0x80, 0x90),
+ "cedar wood finish": (0x71, 0x1A, 0x00),
+ "metallic copper": (0x71, 0x29, 0x1D),
+ "affair": (0x71, 0x46, 0x93),
+ "studio": (0x71, 0x4A, 0xB2),
+ "tobacco brown": (0x71, 0x5D, 0x47),
+ "yellow metal": (0x71, 0x63, 0x38),
+ "peat": (0x71, 0x6B, 0x56),
+ "olivetone": (0x71, 0x6E, 0x10),
+ "storm gray": (0x71, 0x74, 0x86),
+ "sirocco": (0x71, 0x80, 0x80),
+ "aquamarine blue": (0x71, 0xD9, 0xE2),
+ "venetian red": (0x72, 0x01, 0x0F),
+ "old copper": (0x72, 0x4A, 0x2F),
+ "go ben": (0x72, 0x6D, 0x4E),
+ "raven": (0x72, 0x7B, 0x89),
+ "seance": (0x73, 0x1E, 0x8F),
+ "raw umber": (0x73, 0x4A, 0x12),
+ "kimberly": (0x73, 0x6C, 0x9F),
+ "crocodile": (0x73, 0x6D, 0x58),
+ "crete": (0x73, 0x78, 0x29),
+ "xanadu": (0x73, 0x86, 0x78),
+ "spicy mustard": (0x74, 0x64, 0x0D),
+ "limed ash": (0x74, 0x7D, 0x63),
+ "rolling stone": (0x74, 0x7D, 0x83),
+ "blue smoke": (0x74, 0x88, 0x81),
+ "laurel": (0x74, 0x93, 0x78),
+ "mantis": (0x74, 0xC3, 0x65),
+ "russett": (0x75, 0x5A, 0x57),
+ "deluge": (0x75, 0x63, 0xA8),
+ "cosmic": (0x76, 0x39, 0x5D),
+ "blue marguerite": (0x76, 0x66, 0xC6),
+ "lima": (0x76, 0xBD, 0x17),
+ "sky blue": (0x76, 0xD7, 0xEA),
+ "dark burgundy": (0x77, 0x0F, 0x05),
+ "crown of thorns": (0x77, 0x1F, 0x1F),
+ "walnut": (0x77, 0x3F, 0x1A),
+ "pablo": (0x77, 0x6F, 0x61),
+ "pacifika": (0x77, 0x81, 0x20),
+ "oxley": (0x77, 0x9E, 0x86),
+ "pastel green": (0x77, 0xDD, 0x77),
+ "japanese maple": (0x78, 0x01, 0x09),
+ "mocha": (0x78, 0x2D, 0x19),
+ "peanut": (0x78, 0x2F, 0x16),
+ "camouflage green": (0x78, 0x86, 0x6B),
+ "wasabi": (0x78, 0x8A, 0x25),
+ "ship cove": (0x78, 0x8B, 0xBA),
+ "sea nymph": (0x78, 0xA3, 0x9C),
+ "roman coffee": (0x79, 0x5D, 0x4C),
+ "old lavender": (0x79, 0x68, 0x78),
+ "rum": (0x79, 0x69, 0x89),
+ "fedora": (0x79, 0x6A, 0x78),
+ "sandstone": (0x79, 0x6D, 0x62),
+ "spray": (0x79, 0xDE, 0xEC),
+ "siren": (0x7A, 0x01, 0x3A),
+ "fuchsia blue": (0x7A, 0x58, 0xC1),
+ "boulder": (0x7A, 0x7A, 0x7A),
+ "wild blue yonder": (0x7A, 0x89, 0xB8),
+ "de york": (0x7A, 0xC4, 0x88),
+ "red beech": (0x7B, 0x38, 0x01),
+ "cinnamon": (0x7B, 0x3F, 0x00),
+ "yukon gold": (0x7B, 0x66, 0x08),
+ "tapa": (0x7B, 0x78, 0x74),
+ "waterloo ": (0x7B, 0x7C, 0x94),
+ "flax smoke": (0x7B, 0x82, 0x65),
+ "amulet": (0x7B, 0x9F, 0x80),
+ "asparagus": (0x7B, 0xA0, 0x5B),
+ "kenyan copper": (0x7C, 0x1C, 0x05),
+ "pesto": (0x7C, 0x76, 0x31),
+ "topaz": (0x7C, 0x77, 0x8A),
+ "concord": (0x7C, 0x7B, 0x7A),
+ "jumbo": (0x7C, 0x7B, 0x82),
+ "trendy green": (0x7C, 0x88, 0x1A),
+ "gumbo": (0x7C, 0xA1, 0xA6),
+ "acapulco": (0x7C, 0xB0, 0xA1),
+ "neptune": (0x7C, 0xB7, 0xBB),
+ "pueblo": (0x7D, 0x2C, 0x14),
+ "bay leaf": (0x7D, 0xA9, 0x8D),
+ "malibu": (0x7D, 0xC8, 0xF7),
+ "bermuda": (0x7D, 0xD8, 0xC6),
+ "copper canyon": (0x7E, 0x3A, 0x15),
+ "claret": (0x7F, 0x17, 0x34),
+ "peru tan": (0x7F, 0x3A, 0x02),
+ "falcon": (0x7F, 0x62, 0x6D),
+ "mobster": (0x7F, 0x75, 0x89),
+ "moody blue": (0x7F, 0x76, 0xD3),
+ "chartreuse": (0x7F, 0xFF, 0x00),
+ "aquamarine": (0x7F, 0xFF, 0xD4),
+ "maroon": (0x80, 0x00, 0x00),
+ "rose bud cherry": (0x80, 0x0B, 0x47),
+ "falu red": (0x80, 0x18, 0x18),
+ "red robin": (0x80, 0x34, 0x1F),
+ "vivid violet": (0x80, 0x37, 0x90),
+ "russet": (0x80, 0x46, 0x1B),
+ "friar gray": (0x80, 0x7E, 0x79),
+ "olive": (0x80, 0x80, 0x00),
+ "gray": (0x80, 0x80, 0x80),
+ "gulf stream": (0x80, 0xB3, 0xAE),
+ "glacier": (0x80, 0xB3, 0xC4),
+ "seagull": (0x80, 0xCC, 0xEA),
+ "nutmeg": (0x81, 0x42, 0x2C),
+ "spicy pink": (0x81, 0x6E, 0x71),
+ "empress": (0x81, 0x73, 0x77),
+ "spanish green": (0x81, 0x98, 0x85),
+ "sand dune": (0x82, 0x6F, 0x65),
+ "gunsmoke": (0x82, 0x86, 0x85),
+ "battleship gray": (0x82, 0x8F, 0x72),
+ "merlot": (0x83, 0x19, 0x23),
+ "shadow": (0x83, 0x70, 0x50),
+ "chelsea cucumber": (0x83, 0xAA, 0x5D),
+ "monte carlo": (0x83, 0xD0, 0xC6),
+ "plum": (0x84, 0x31, 0x79),
+ "granny smith": (0x84, 0xA0, 0xA0),
+ "chetwode blue": (0x85, 0x81, 0xD9),
+ "bandicoot": (0x85, 0x84, 0x70),
+ "bali hai": (0x85, 0x9F, 0xAF),
+ "half baked": (0x85, 0xC4, 0xCC),
+ "red devil": (0x86, 0x01, 0x11),
+ "lotus": (0x86, 0x3C, 0x3C),
+ "ironstone": (0x86, 0x48, 0x3C),
+ "bull shot": (0x86, 0x4D, 0x1E),
+ "rusty nail": (0x86, 0x56, 0x0A),
+ "bitter": (0x86, 0x89, 0x74),
+ "regent gray": (0x86, 0x94, 0x9F),
+ "disco": (0x87, 0x15, 0x50),
+ "americano": (0x87, 0x75, 0x6E),
+ "hurricane": (0x87, 0x7C, 0x7B),
+ "oslo gray": (0x87, 0x8D, 0x91),
+ "sushi": (0x87, 0xAB, 0x39),
+ "spicy mix": (0x88, 0x53, 0x42),
+ "kumera": (0x88, 0x62, 0x21),
+ "suva gray": (0x88, 0x83, 0x87),
+ "avocado": (0x88, 0x8D, 0x65),
+ "camelot": (0x89, 0x34, 0x56),
+ "solid pink": (0x89, 0x38, 0x43),
+ "cannon pink": (0x89, 0x43, 0x67),
+ "makara": (0x89, 0x7D, 0x6D),
+ "burnt umber": (0x8A, 0x33, 0x24),
+ "true v": (0x8A, 0x73, 0xD6),
+ "clay creek": (0x8A, 0x83, 0x60),
+ "monsoon": (0x8A, 0x83, 0x89),
+ "stack": (0x8A, 0x8F, 0x8A),
+ "jordy blue": (0x8A, 0xB9, 0xF1),
+ "electric violet": (0x8B, 0x00, 0xFF),
+ "monarch": (0x8B, 0x07, 0x23),
+ "corn harvest": (0x8B, 0x6B, 0x0B),
+ "olive haze": (0x8B, 0x84, 0x70),
+ "schooner": (0x8B, 0x84, 0x7E),
+ "natural gray": (0x8B, 0x86, 0x80),
+ "mantle": (0x8B, 0x9C, 0x90),
+ "portage": (0x8B, 0x9F, 0xEE),
+ "envy": (0x8B, 0xA6, 0x90),
+ "cascade": (0x8B, 0xA9, 0xA5),
+ "riptide": (0x8B, 0xE6, 0xD8),
+ "cardinal pink": (0x8C, 0x05, 0x5E),
+ "mule fawn": (0x8C, 0x47, 0x2F),
+ "potters clay": (0x8C, 0x57, 0x38),
+ "trendy pink": (0x8C, 0x64, 0x95),
+ "paprika": (0x8D, 0x02, 0x26),
+ "sanguine brown": (0x8D, 0x3D, 0x38),
+ "tosca": (0x8D, 0x3F, 0x3F),
+ "cement": (0x8D, 0x76, 0x62),
+ "granite green": (0x8D, 0x89, 0x74),
+ "manatee": (0x8D, 0x90, 0xA1),
+ "polo blue": (0x8D, 0xA8, 0xCC),
+ "red berry": (0x8E, 0x00, 0x00),
+ "rope": (0x8E, 0x4D, 0x1E),
+ "opium": (0x8E, 0x6F, 0x70),
+ "domino": (0x8E, 0x77, 0x5E),
+ "mamba": (0x8E, 0x81, 0x90),
+ "nepal": (0x8E, 0xAB, 0xC1),
+ "pohutukawa": (0x8F, 0x02, 0x1C),
+ "el salva": (0x8F, 0x3E, 0x33),
+ "korma": (0x8F, 0x4B, 0x0E),
+ "squirrel": (0x8F, 0x81, 0x76),
+ "vista blue": (0x8F, 0xD6, 0xB4),
+ "burgundy": (0x90, 0x00, 0x20),
+ "old brick": (0x90, 0x1E, 0x1E),
+ "hemp": (0x90, 0x78, 0x74),
+ "almond frost": (0x90, 0x7B, 0x71),
+ "sycamore": (0x90, 0x8D, 0x39),
+ "sangria": (0x92, 0x00, 0x0A),
+ "cumin": (0x92, 0x43, 0x21),
+ "beaver": (0x92, 0x6F, 0x5B),
+ "stonewall": (0x92, 0x85, 0x73),
+ "venus": (0x92, 0x85, 0x90),
+ "medium purple": (0x93, 0x70, 0xDB),
+ "cornflower": (0x93, 0xCC, 0xEA),
+ "algae green": (0x93, 0xDF, 0xB8),
+ "copper rust": (0x94, 0x47, 0x47),
+ "arrowtown": (0x94, 0x87, 0x71),
+ "scarlett": (0x95, 0x00, 0x15),
+ "strikemaster": (0x95, 0x63, 0x87),
+ "mountain mist": (0x95, 0x93, 0x96),
+ "carmine": (0x96, 0x00, 0x18),
+ "brown": (0x96, 0x4B, 0x00),
+ "leather": (0x96, 0x70, 0x59),
+ "purple mountain's majesty": (0x96, 0x78, 0xB6),
+ "lavender purple": (0x96, 0x7B, 0xB6),
+ "pewter": (0x96, 0xA8, 0xA1),
+ "summer green": (0x96, 0xBB, 0xAB),
+ "au chico": (0x97, 0x60, 0x5D),
+ "wisteria": (0x97, 0x71, 0xB5),
+ "atlantis": (0x97, 0xCD, 0x2D),
+ "vin rouge": (0x98, 0x3D, 0x61),
+ "lilac bush": (0x98, 0x74, 0xD3),
+ "bazaar": (0x98, 0x77, 0x7B),
+ "hacienda": (0x98, 0x81, 0x1B),
+ "pale oyster": (0x98, 0x8D, 0x77),
+ "mint green": (0x98, 0xFF, 0x98),
+ "fresh eggplant": (0x99, 0x00, 0x66),
+ "violet eggplant": (0x99, 0x11, 0x99),
+ "tamarillo": (0x99, 0x16, 0x13),
+ "totem pole": (0x99, 0x1B, 0x07),
+ "copper rose": (0x99, 0x66, 0x66),
+ "amethyst": (0x99, 0x66, 0xCC),
+ "mountbatten pink": (0x99, 0x7A, 0x8D),
+ "blue bell": (0x99, 0x99, 0xCC),
+ "prairie sand": (0x9A, 0x38, 0x20),
+ "toast": (0x9A, 0x6E, 0x61),
+ "gurkha": (0x9A, 0x95, 0x77),
+ "olivine": (0x9A, 0xB9, 0x73),
+ "shadow green": (0x9A, 0xC2, 0xB8),
+ "oregon": (0x9B, 0x47, 0x03),
+ "lemon grass": (0x9B, 0x9E, 0x8F),
+ "stiletto": (0x9C, 0x33, 0x36),
+ "hawaiian tan": (0x9D, 0x56, 0x16),
+ "gull gray": (0x9D, 0xAC, 0xB7),
+ "pistachio": (0x9D, 0xC2, 0x09),
+ "granny smith apple": (0x9D, 0xE0, 0x93),
+ "anakiwa": (0x9D, 0xE5, 0xFF),
+ "chelsea gem": (0x9E, 0x53, 0x02),
+ "sepia skin": (0x9E, 0x5B, 0x40),
+ "sage": (0x9E, 0xA5, 0x87),
+ "citron": (0x9E, 0xA9, 0x1F),
+ "rock blue": (0x9E, 0xB1, 0xCD),
+ "morning glory": (0x9E, 0xDE, 0xE0),
+ "cognac": (0x9F, 0x38, 0x1D),
+ "reef gold": (0x9F, 0x82, 0x1C),
+ "star dust": (0x9F, 0x9F, 0x9C),
+ "santas gray": (0x9F, 0xA0, 0xB1),
+ "sinbad": (0x9F, 0xD7, 0xD3),
+ "feijoa": (0x9F, 0xDD, 0x8C),
+ "tabasco": (0xA0, 0x27, 0x12),
+ "buttered rum": (0xA1, 0x75, 0x0D),
+ "hit gray": (0xA1, 0xAD, 0xB5),
+ "citrus": (0xA1, 0xC5, 0x0A),
+ "aqua island": (0xA1, 0xDA, 0xD7),
+ "water leaf": (0xA1, 0xE9, 0xDE),
+ "flirt": (0xA2, 0x00, 0x6D),
+ "rouge": (0xA2, 0x3B, 0x6C),
+ "cape palliser": (0xA2, 0x66, 0x45),
+ "gray chateau": (0xA2, 0xAA, 0xB3),
+ "edward": (0xA2, 0xAE, 0xAB),
+ "pharlap": (0xA3, 0x80, 0x7B),
+ "amethyst smoke": (0xA3, 0x97, 0xB4),
+ "blizzard blue": (0xA3, 0xE3, 0xED),
+ "delta": (0xA4, 0xA4, 0x9D),
+ "wistful": (0xA4, 0xA6, 0xD3),
+ "green smoke": (0xA4, 0xAF, 0x6E),
+ "jazzberry jam": (0xA5, 0x0B, 0x5E),
+ "zorba": (0xA5, 0x9B, 0x91),
+ "bahia": (0xA5, 0xCB, 0x0C),
+ "roof terracotta": (0xA6, 0x2F, 0x20),
+ "paarl": (0xA6, 0x55, 0x29),
+ "barley corn": (0xA6, 0x8B, 0x5B),
+ "donkey brown": (0xA6, 0x92, 0x79),
+ "dawn": (0xA6, 0xA2, 0x9A),
+ "mexican red": (0xA7, 0x25, 0x25),
+ "luxor gold": (0xA7, 0x88, 0x2C),
+ "rich gold": (0xA8, 0x53, 0x07),
+ "reno sand": (0xA8, 0x65, 0x15),
+ "coral tree": (0xA8, 0x6B, 0x6B),
+ "dusty gray": (0xA8, 0x98, 0x9B),
+ "dull lavender": (0xA8, 0x99, 0xE6),
+ "tallow": (0xA8, 0xA5, 0x89),
+ "bud": (0xA8, 0xAE, 0x9C),
+ "locust": (0xA8, 0xAF, 0x8E),
+ "norway": (0xA8, 0xBD, 0x9F),
+ "chinook": (0xA8, 0xE3, 0xBD),
+ "gray olive": (0xA9, 0xA4, 0x91),
+ "aluminium": (0xA9, 0xAC, 0xB6),
+ "cadet blue": (0xA9, 0xB2, 0xC3),
+ "schist": (0xA9, 0xB4, 0x97),
+ "tower gray": (0xA9, 0xBD, 0xBF),
+ "perano": (0xA9, 0xBE, 0xF2),
+ "opal": (0xA9, 0xC6, 0xC2),
+ "night shadz": (0xAA, 0x37, 0x5A),
+ "fire": (0xAA, 0x42, 0x03),
+ "muesli": (0xAA, 0x8B, 0x5B),
+ "sandal": (0xAA, 0x8D, 0x6F),
+ "shady lady": (0xAA, 0xA5, 0xA9),
+ "logan": (0xAA, 0xA9, 0xCD),
+ "spun pearl": (0xAA, 0xAB, 0xB7),
+ "regent st blue": (0xAA, 0xD6, 0xE6),
+ "magic mint": (0xAA, 0xF0, 0xD1),
+ "lipstick": (0xAB, 0x05, 0x63),
+ "royal heath": (0xAB, 0x34, 0x72),
+ "sandrift": (0xAB, 0x91, 0x7A),
+ "cold purple": (0xAB, 0xA0, 0xD9),
+ "bronco": (0xAB, 0xA1, 0x96),
+ "limed oak": (0xAC, 0x8A, 0x56),
+ "east side": (0xAC, 0x91, 0xCE),
+ "lemon ginger": (0xAC, 0x9E, 0x22),
+ "napa": (0xAC, 0xA4, 0x94),
+ "hillary": (0xAC, 0xA5, 0x86),
+ "cloudy": (0xAC, 0xA5, 0x9F),
+ "silver chalice": (0xAC, 0xAC, 0xAC),
+ "swamp green": (0xAC, 0xB7, 0x8E),
+ "spring rain": (0xAC, 0xCB, 0xB1),
+ "conifer": (0xAC, 0xDD, 0x4D),
+ "celadon": (0xAC, 0xE1, 0xAF),
+ "mandalay": (0xAD, 0x78, 0x1B),
+ "casper": (0xAD, 0xBE, 0xD1),
+ "moss green": (0xAD, 0xDF, 0xAD),
+ "padua": (0xAD, 0xE6, 0xC4),
+ "green yellow": (0xAD, 0xFF, 0x2F),
+ "hippie pink": (0xAE, 0x45, 0x60),
+ "desert": (0xAE, 0x60, 0x20),
+ "bouquet": (0xAE, 0x80, 0x9E),
+ "medium carmine": (0xAF, 0x40, 0x35),
+ "apple blossom": (0xAF, 0x4D, 0x43),
+ "brown rust": (0xAF, 0x59, 0x3E),
+ "driftwood": (0xAF, 0x87, 0x51),
+ "alpine": (0xAF, 0x8F, 0x2C),
+ "lucky": (0xAF, 0x9F, 0x1C),
+ "martini": (0xAF, 0xA0, 0x9E),
+ "bombay": (0xAF, 0xB1, 0xB8),
+ "pigeon post": (0xAF, 0xBD, 0xD9),
+ "cadillac": (0xB0, 0x4C, 0x6A),
+ "matrix": (0xB0, 0x5D, 0x54),
+ "tapestry": (0xB0, 0x5E, 0x81),
+ "mai tai": (0xB0, 0x66, 0x08),
+ "del rio": (0xB0, 0x9A, 0x95),
+ "powder blue": (0xB0, 0xE0, 0xE6),
+ "inch worm": (0xB0, 0xE3, 0x13),
+ "bright red": (0xB1, 0x00, 0x00),
+ "vesuvius": (0xB1, 0x4A, 0x0B),
+ "pumpkin skin": (0xB1, 0x61, 0x0B),
+ "santa fe": (0xB1, 0x6D, 0x52),
+ "teak": (0xB1, 0x94, 0x61),
+ "fringy flower": (0xB1, 0xE2, 0xC1),
+ "ice cold": (0xB1, 0xF4, 0xE7),
+ "shiraz": (0xB2, 0x09, 0x31),
+ "biloba flower": (0xB2, 0xA1, 0xEA),
+ "tall poppy": (0xB3, 0x2D, 0x29),
+ "fiery orange": (0xB3, 0x52, 0x13),
+ "hot toddy": (0xB3, 0x80, 0x07),
+ "taupe gray": (0xB3, 0xAF, 0x95),
+ "la rioja": (0xB3, 0xC1, 0x10),
+ "well read": (0xB4, 0x33, 0x32),
+ "blush": (0xB4, 0x46, 0x68),
+ "jungle mist": (0xB4, 0xCF, 0xD3),
+ "turkish rose": (0xB5, 0x72, 0x81),
+ "lavender": (0xB5, 0x7E, 0xDC),
+ "mongoose": (0xB5, 0xA2, 0x7F),
+ "olive green": (0xB5, 0xB3, 0x5C),
+ "jet stream": (0xB5, 0xD2, 0xCE),
+ "cruise": (0xB5, 0xEC, 0xDF),
+ "hibiscus": (0xB6, 0x31, 0x6C),
+ "thatch": (0xB6, 0x9D, 0x98),
+ "heathered gray": (0xB6, 0xB0, 0x95),
+ "eagle": (0xB6, 0xBA, 0xA4),
+ "spindle": (0xB6, 0xD1, 0xEA),
+ "gum leaf": (0xB6, 0xD3, 0xBF),
+ "rust": (0xB7, 0x41, 0x0E),
+ "muddy waters": (0xB7, 0x8E, 0x5C),
+ "sahara": (0xB7, 0xA2, 0x14),
+ "husk": (0xB7, 0xA4, 0x58),
+ "nobel": (0xB7, 0xB1, 0xB1),
+ "heather": (0xB7, 0xC3, 0xD0),
+ "madang": (0xB7, 0xF0, 0xBE),
+ "milano red": (0xB8, 0x11, 0x04),
+ "copper": (0xB8, 0x73, 0x33),
+ "gimblet": (0xB8, 0xB5, 0x6A),
+ "green spring": (0xB8, 0xC1, 0xB1),
+ "celery": (0xB8, 0xC2, 0x5D),
+ "sail": (0xB8, 0xE0, 0xF9),
+ "chestnut": (0xB9, 0x4E, 0x48),
+ "crail": (0xB9, 0x51, 0x40),
+ "marigold": (0xB9, 0x8D, 0x28),
+ "wild willow": (0xB9, 0xC4, 0x6A),
+ "rainee": (0xB9, 0xC8, 0xAC),
+ "guardsman red": (0xBA, 0x01, 0x01),
+ "rock spray": (0xBA, 0x45, 0x0C),
+ "bourbon": (0xBA, 0x6F, 0x1E),
+ "pirate gold": (0xBA, 0x7F, 0x03),
+ "nomad": (0xBA, 0xB1, 0xA2),
+ "submarine": (0xBA, 0xC7, 0xC9),
+ "charlotte": (0xBA, 0xEE, 0xF9),
+ "medium red violet": (0xBB, 0x33, 0x85),
+ "brandy rose": (0xBB, 0x89, 0x83),
+ "rio grande": (0xBB, 0xD0, 0x09),
+ "surf": (0xBB, 0xD7, 0xC1),
+ "powder ash": (0xBC, 0xC9, 0xC2),
+ "tuscany": (0xBD, 0x5E, 0x2E),
+ "quicksand": (0xBD, 0x97, 0x8E),
+ "silk": (0xBD, 0xB1, 0xA8),
+ "malta": (0xBD, 0xB2, 0xA1),
+ "chatelle": (0xBD, 0xB3, 0xC7),
+ "lavender gray": (0xBD, 0xBB, 0xD7),
+ "french gray": (0xBD, 0xBD, 0xC6),
+ "clay ash": (0xBD, 0xC8, 0xB3),
+ "loblolly": (0xBD, 0xC9, 0xCE),
+ "french pass": (0xBD, 0xED, 0xFD),
+ "london hue": (0xBE, 0xA6, 0xC3),
+ "pink swan": (0xBE, 0xB5, 0xB7),
+ "fuego": (0xBE, 0xDE, 0x0D),
+ "rose of sharon": (0xBF, 0x55, 0x00),
+ "tide": (0xBF, 0xB8, 0xB0),
+ "blue haze": (0xBF, 0xBE, 0xD8),
+ "silver sand": (0xBF, 0xC1, 0xC2),
+ "key lime pie": (0xBF, 0xC9, 0x21),
+ "ziggurat": (0xBF, 0xDB, 0xE2),
+ "lime": (0xBF, 0xFF, 0x00),
+ "thunderbird": (0xC0, 0x2B, 0x18),
+ "mojo": (0xC0, 0x47, 0x37),
+ "old rose": (0xC0, 0x80, 0x81),
+ "silver": (0xC0, 0xC0, 0xC0),
+ "pale leaf": (0xC0, 0xD3, 0xB9),
+ "pixie green": (0xC0, 0xD8, 0xB6),
+ "tia maria": (0xC1, 0x44, 0x0E),
+ "fuchsia pink": (0xC1, 0x54, 0xC1),
+ "buddha gold": (0xC1, 0xA0, 0x04),
+ "bison hide": (0xC1, 0xB7, 0xA4),
+ "tea": (0xC1, 0xBA, 0xB0),
+ "gray suit": (0xC1, 0xBE, 0xCD),
+ "sprout": (0xC1, 0xD7, 0xB0),
+ "sulu": (0xC1, 0xF0, 0x7C),
+ "indochine": (0xC2, 0x6B, 0x03),
+ "twine": (0xC2, 0x95, 0x5D),
+ "cotton seed": (0xC2, 0xBD, 0xB6),
+ "pumice": (0xC2, 0xCA, 0xC4),
+ "jagged ice": (0xC2, 0xE8, 0xE5),
+ "maroon flush": (0xC3, 0x21, 0x48),
+ "indian khaki": (0xC3, 0xB0, 0x91),
+ "pale slate": (0xC3, 0xBF, 0xC1),
+ "gray nickel": (0xC3, 0xC3, 0xBD),
+ "periwinkle gray": (0xC3, 0xCD, 0xE6),
+ "tiara": (0xC3, 0xD1, 0xD1),
+ "tropical blue": (0xC3, 0xDD, 0xF9),
+ "cardinal": (0xC4, 0x1E, 0x3A),
+ "fuzzy wuzzy brown": (0xC4, 0x56, 0x55),
+ "orange roughy": (0xC4, 0x57, 0x19),
+ "mist gray": (0xC4, 0xC4, 0xBC),
+ "coriander": (0xC4, 0xD0, 0xB0),
+ "mint tulip": (0xC4, 0xF4, 0xEB),
+ "mulberry": (0xC5, 0x4B, 0x8C),
+ "nugget": (0xC5, 0x99, 0x22),
+ "tussock": (0xC5, 0x99, 0x4B),
+ "sea mist": (0xC5, 0xDB, 0xCA),
+ "yellow green": (0xC5, 0xE1, 0x7A),
+ "brick red": (0xC6, 0x2D, 0x42),
+ "contessa": (0xC6, 0x72, 0x6B),
+ "oriental pink": (0xC6, 0x91, 0x91),
+ "roti": (0xC6, 0xA8, 0x4B),
+ "ash": (0xC6, 0xC3, 0xB5),
+ "kangaroo": (0xC6, 0xC8, 0xBD),
+ "las palmas": (0xC6, 0xE6, 0x10),
+ "monza": (0xC7, 0x03, 0x1E),
+ "red violet": (0xC7, 0x15, 0x85),
+ "coral reef": (0xC7, 0xBC, 0xA2),
+ "melrose": (0xC7, 0xC1, 0xFF),
+ "cloud": (0xC7, 0xC4, 0xBF),
+ "ghost": (0xC7, 0xC9, 0xD5),
+ "pine glade": (0xC7, 0xCD, 0x90),
+ "botticelli": (0xC7, 0xDD, 0xE5),
+ "antique brass": (0xC8, 0x8A, 0x65),
+ "lilac": (0xC8, 0xA2, 0xC8),
+ "hokey pokey": (0xC8, 0xA5, 0x28),
+ "lily": (0xC8, 0xAA, 0xBF),
+ "laser": (0xC8, 0xB5, 0x68),
+ "edgewater": (0xC8, 0xE3, 0xD7),
+ "piper": (0xC9, 0x63, 0x23),
+ "pizza": (0xC9, 0x94, 0x15),
+ "light wisteria": (0xC9, 0xA0, 0xDC),
+ "rodeo dust": (0xC9, 0xB2, 0x9B),
+ "sundance": (0xC9, 0xB3, 0x5B),
+ "earls green": (0xC9, 0xB9, 0x3B),
+ "silver rust": (0xC9, 0xC0, 0xBB),
+ "conch": (0xC9, 0xD9, 0xD2),
+ "reef": (0xC9, 0xFF, 0xA2),
+ "aero blue": (0xC9, 0xFF, 0xE5),
+ "flush mahogany": (0xCA, 0x34, 0x35),
+ "turmeric": (0xCA, 0xBB, 0x48),
+ "paris white": (0xCA, 0xDC, 0xD4),
+ "bitter lemon": (0xCA, 0xE0, 0x0D),
+ "skeptic": (0xCA, 0xE6, 0xDA),
+ "viola": (0xCB, 0x8F, 0xA9),
+ "foggy gray": (0xCB, 0xCA, 0xB6),
+ "green mist": (0xCB, 0xD3, 0xB0),
+ "nebula": (0xCB, 0xDB, 0xD6),
+ "persian red": (0xCC, 0x33, 0x33),
+ "burnt orange": (0xCC, 0x55, 0x00),
+ "ochre": (0xCC, 0x77, 0x22),
+ "puce": (0xCC, 0x88, 0x99),
+ "thistle green": (0xCC, 0xCA, 0xA8),
+ "periwinkle": (0xCC, 0xCC, 0xFF),
+ "electric lime": (0xCC, 0xFF, 0x00),
+ "tenn": (0xCD, 0x57, 0x00),
+ "chestnut rose": (0xCD, 0x5C, 0x5C),
+ "brandy punch": (0xCD, 0x84, 0x29),
+ "onahau": (0xCD, 0xF4, 0xFF),
+ "sorrell brown": (0xCE, 0xB9, 0x8F),
+ "cold turkey": (0xCE, 0xBA, 0xBA),
+ "yuma": (0xCE, 0xC2, 0x91),
+ "chino": (0xCE, 0xC7, 0xA7),
+ "eunry": (0xCF, 0xA3, 0x9D),
+ "old gold": (0xCF, 0xB5, 0x3B),
+ "tasman": (0xCF, 0xDC, 0xCF),
+ "surf crest": (0xCF, 0xE5, 0xD2),
+ "humming bird": (0xCF, 0xF9, 0xF3),
+ "scandal": (0xCF, 0xFA, 0xF4),
+ "red stage": (0xD0, 0x5F, 0x04),
+ "hopbush": (0xD0, 0x6D, 0xA1),
+ "meteor": (0xD0, 0x7D, 0x12),
+ "perfume": (0xD0, 0xBE, 0xF8),
+ "prelude": (0xD0, 0xC0, 0xE5),
+ "tea green": (0xD0, 0xF0, 0xC0),
+ "geebung": (0xD1, 0x8F, 0x1B),
+ "vanilla": (0xD1, 0xBE, 0xA8),
+ "soft amber": (0xD1, 0xC6, 0xB4),
+ "celeste": (0xD1, 0xD2, 0xCA),
+ "mischka": (0xD1, 0xD2, 0xDD),
+ "pear": (0xD1, 0xE2, 0x31),
+ "hot cinnamon": (0xD2, 0x69, 0x1E),
+ "raw sienna": (0xD2, 0x7D, 0x46),
+ "careys pink": (0xD2, 0x9E, 0xAA),
+ "tan": (0xD2, 0xB4, 0x8C),
+ "deco": (0xD2, 0xDA, 0x97),
+ "blue romance": (0xD2, 0xF6, 0xDE),
+ "gossip": (0xD2, 0xF8, 0xB0),
+ "sisal": (0xD3, 0xCB, 0xBA),
+ "swirl": (0xD3, 0xCD, 0xC5),
+ "charm": (0xD4, 0x74, 0x94),
+ "clam shell": (0xD4, 0xB6, 0xAF),
+ "straw": (0xD4, 0xBF, 0x8D),
+ "akaroa": (0xD4, 0xC4, 0xA8),
+ "bird flower": (0xD4, 0xCD, 0x16),
+ "iron": (0xD4, 0xD7, 0xD9),
+ "geyser": (0xD4, 0xDF, 0xE2),
+ "hawkes blue": (0xD4, 0xE2, 0xFC),
+ "grenadier": (0xD5, 0x46, 0x00),
+ "can can": (0xD5, 0x91, 0xA4),
+ "whiskey": (0xD5, 0x9A, 0x6F),
+ "winter hazel": (0xD5, 0xD1, 0x95),
+ "granny apple": (0xD5, 0xF6, 0xE3),
+ "my pink": (0xD6, 0x91, 0x88),
+ "tacha": (0xD6, 0xC5, 0x62),
+ "moon raker": (0xD6, 0xCE, 0xF6),
+ "quill gray": (0xD6, 0xD6, 0xD1),
+ "snowy mint": (0xD6, 0xFF, 0xDB),
+ "new york pink": (0xD7, 0x83, 0x7F),
+ "pavlova": (0xD7, 0xC4, 0x98),
+ "fog": (0xD7, 0xD0, 0xFF),
+ "valencia": (0xD8, 0x44, 0x37),
+ "japonica": (0xD8, 0x7C, 0x63),
+ "thistle": (0xD8, 0xBF, 0xD8),
+ "maverick": (0xD8, 0xC2, 0xD5),
+ "foam": (0xD8, 0xFC, 0xFA),
+ "cabaret": (0xD9, 0x49, 0x72),
+ "burning sand": (0xD9, 0x93, 0x76),
+ "cameo": (0xD9, 0xB9, 0x9B),
+ "timberwolf": (0xD9, 0xD6, 0xCF),
+ "tana": (0xD9, 0xDC, 0xC1),
+ "link water": (0xD9, 0xE4, 0xF5),
+ "mabel": (0xD9, 0xF7, 0xFF),
+ "cerise": (0xDA, 0x32, 0x87),
+ "flame pea": (0xDA, 0x5B, 0x38),
+ "bamboo": (0xDA, 0x63, 0x04),
+ "red damask": (0xDA, 0x6A, 0x41),
+ "orchid": (0xDA, 0x70, 0xD6),
+ "copperfield": (0xDA, 0x8A, 0x67),
+ "golden grass": (0xDA, 0xA5, 0x20),
+ "zanah": (0xDA, 0xEC, 0xD6),
+ "iceberg": (0xDA, 0xF4, 0xF0),
+ "oyster bay": (0xDA, 0xFA, 0xFF),
+ "cranberry": (0xDB, 0x50, 0x79),
+ "petite orchid": (0xDB, 0x96, 0x90),
+ "di serria": (0xDB, 0x99, 0x5E),
+ "alto": (0xDB, 0xDB, 0xDB),
+ "frosted mint": (0xDB, 0xFF, 0xF8),
+ "crimson": (0xDC, 0x14, 0x3C),
+ "punch": (0xDC, 0x43, 0x33),
+ "galliano": (0xDC, 0xB2, 0x0C),
+ "blossom": (0xDC, 0xB4, 0xBC),
+ "wattle": (0xDC, 0xD7, 0x47),
+ "westar": (0xDC, 0xD9, 0xD2),
+ "moon mist": (0xDC, 0xDD, 0xCC),
+ "caper": (0xDC, 0xED, 0xB4),
+ "swans down": (0xDC, 0xF0, 0xEA),
+ "swiss coffee": (0xDD, 0xD6, 0xD5),
+ "white ice": (0xDD, 0xF9, 0xF1),
+ "cerise red": (0xDE, 0x31, 0x63),
+ "roman": (0xDE, 0x63, 0x60),
+ "tumbleweed": (0xDE, 0xA6, 0x81),
+ "gold tips": (0xDE, 0xBA, 0x13),
+ "brandy": (0xDE, 0xC1, 0x96),
+ "wafer": (0xDE, 0xCB, 0xC6),
+ "sapling": (0xDE, 0xD4, 0xA4),
+ "barberry": (0xDE, 0xD7, 0x17),
+ "beryl green": (0xDE, 0xE5, 0xC0),
+ "pattens blue": (0xDE, 0xF5, 0xFF),
+ "heliotrope": (0xDF, 0x73, 0xFF),
+ "apache": (0xDF, 0xBE, 0x6F),
+ "chenin": (0xDF, 0xCD, 0x6F),
+ "lola": (0xDF, 0xCF, 0xDB),
+ "willow brook": (0xDF, 0xEC, 0xDA),
+ "chartreuse yellow": (0xDF, 0xFF, 0x00),
+ "mauve": (0xE0, 0xB0, 0xFF),
+ "anzac": (0xE0, 0xB6, 0x46),
+ "harvest gold": (0xE0, 0xB9, 0x74),
+ "calico": (0xE0, 0xC0, 0x95),
+ "baby blue": (0xE0, 0xFF, 0xFF),
+ "sunglo": (0xE1, 0x68, 0x65),
+ "equator": (0xE1, 0xBC, 0x64),
+ "pink flare": (0xE1, 0xC0, 0xC8),
+ "periglacial blue": (0xE1, 0xE6, 0xD6),
+ "kidnapper": (0xE1, 0xEA, 0xD4),
+ "tara": (0xE1, 0xF6, 0xE8),
+ "mandy": (0xE2, 0x54, 0x65),
+ "terracotta": (0xE2, 0x72, 0x5B),
+ "golden bell": (0xE2, 0x89, 0x13),
+ "shocking": (0xE2, 0x92, 0xC0),
+ "dixie": (0xE2, 0x94, 0x18),
+ "light orchid": (0xE2, 0x9C, 0xD2),
+ "snuff": (0xE2, 0xD8, 0xED),
+ "mystic": (0xE2, 0xEB, 0xED),
+ "apple green": (0xE2, 0xF3, 0xEC),
+ "razzmatazz": (0xE3, 0x0B, 0x5C),
+ "alizarin crimson": (0xE3, 0x26, 0x36),
+ "cinnabar": (0xE3, 0x42, 0x34),
+ "cavern pink": (0xE3, 0xBE, 0xBE),
+ "peppermint": (0xE3, 0xF5, 0xE1),
+ "mindaro": (0xE3, 0xF9, 0x88),
+ "deep blush": (0xE4, 0x76, 0x98),
+ "gamboge": (0xE4, 0x9B, 0x0F),
+ "melanie": (0xE4, 0xC2, 0xD5),
+ "twilight": (0xE4, 0xCF, 0xDE),
+ "bone": (0xE4, 0xD1, 0xC0),
+ "sunflower": (0xE4, 0xD4, 0x22),
+ "grain brown": (0xE4, 0xD5, 0xB7),
+ "zombie": (0xE4, 0xD6, 0x9B),
+ "frostee": (0xE4, 0xF6, 0xE7),
+ "snow flurry": (0xE4, 0xFF, 0xD1),
+ "amaranth": (0xE5, 0x2B, 0x50),
+ "zest": (0xE5, 0x84, 0x1B),
+ "dust storm": (0xE5, 0xCC, 0xC9),
+ "stark white": (0xE5, 0xD7, 0xBD),
+ "hampton": (0xE5, 0xD8, 0xAF),
+ "bon jour": (0xE5, 0xE0, 0xE1),
+ "mercury": (0xE5, 0xE5, 0xE5),
+ "polar": (0xE5, 0xF9, 0xF6),
+ "trinidad": (0xE6, 0x4E, 0x03),
+ "gold sand": (0xE6, 0xBE, 0x8A),
+ "cashmere": (0xE6, 0xBE, 0xA5),
+ "double spanish white": (0xE6, 0xD7, 0xB9),
+ "satin linen": (0xE6, 0xE4, 0xD4),
+ "harp": (0xE6, 0xF2, 0xEA),
+ "off green": (0xE6, 0xF8, 0xF3),
+ "hint of green": (0xE6, 0xFF, 0xE9),
+ "tranquil": (0xE6, 0xFF, 0xFF),
+ "mango tango": (0xE7, 0x72, 0x00),
+ "christine": (0xE7, 0x73, 0x0A),
+ "tonys pink": (0xE7, 0x9F, 0x8C),
+ "kobi": (0xE7, 0x9F, 0xC4),
+ "rose fog": (0xE7, 0xBC, 0xB4),
+ "corn": (0xE7, 0xBF, 0x05),
+ "putty": (0xE7, 0xCD, 0x8C),
+ "gray nurse": (0xE7, 0xEC, 0xE6),
+ "lily white": (0xE7, 0xF8, 0xFF),
+ "bubbles": (0xE7, 0xFE, 0xFF),
+ "fire bush": (0xE8, 0x99, 0x28),
+ "shilo": (0xE8, 0xB9, 0xB3),
+ "pearl bush": (0xE8, 0xE0, 0xD5),
+ "green white": (0xE8, 0xEB, 0xE0),
+ "chrome white": (0xE8, 0xF1, 0xD4),
+ "gin": (0xE8, 0xF2, 0xEB),
+ "aqua squeeze": (0xE8, 0xF5, 0xF2),
+ "clementine": (0xE9, 0x6E, 0x00),
+ "burnt sienna": (0xE9, 0x74, 0x51),
+ "tahiti gold": (0xE9, 0x7C, 0x07),
+ "oyster pink": (0xE9, 0xCE, 0xCD),
+ "confetti": (0xE9, 0xD7, 0x5A),
+ "ebb": (0xE9, 0xE3, 0xE3),
+ "ottoman": (0xE9, 0xF8, 0xED),
+ "clear day": (0xE9, 0xFF, 0xFD),
+ "carissma": (0xEA, 0x88, 0xA8),
+ "porsche": (0xEA, 0xAE, 0x69),
+ "tulip tree": (0xEA, 0xB3, 0x3B),
+ "rob roy": (0xEA, 0xC6, 0x74),
+ "raffia": (0xEA, 0xDA, 0xB8),
+ "white rock": (0xEA, 0xE8, 0xD4),
+ "panache": (0xEA, 0xF6, 0xEE),
+ "solitude": (0xEA, 0xF6, 0xFF),
+ "aqua spring": (0xEA, 0xF9, 0xF5),
+ "dew": (0xEA, 0xFF, 0xFE),
+ "apricot": (0xEB, 0x93, 0x73),
+ "zinnwaldite": (0xEB, 0xC2, 0xAF),
+ "fuel yellow": (0xEC, 0xA9, 0x27),
+ "ronchi": (0xEC, 0xC5, 0x4E),
+ "french lilac": (0xEC, 0xC7, 0xEE),
+ "just right": (0xEC, 0xCD, 0xB9),
+ "wild rice": (0xEC, 0xE0, 0x90),
+ "fall green": (0xEC, 0xEB, 0xBD),
+ "aths special": (0xEC, 0xEB, 0xCE),
+ "starship": (0xEC, 0xF2, 0x45),
+ "red ribbon": (0xED, 0x0A, 0x3F),
+ "tango": (0xED, 0x7A, 0x1C),
+ "carrot orange": (0xED, 0x91, 0x21),
+ "sea pink": (0xED, 0x98, 0x9E),
+ "tacao": (0xED, 0xB3, 0x81),
+ "desert sand": (0xED, 0xC9, 0xAF),
+ "pancho": (0xED, 0xCD, 0xAB),
+ "chamois": (0xED, 0xDC, 0xB1),
+ "primrose": (0xED, 0xEA, 0x99),
+ "frost": (0xED, 0xF5, 0xDD),
+ "aqua haze": (0xED, 0xF5, 0xF5),
+ "zumthor": (0xED, 0xF6, 0xFF),
+ "narvik": (0xED, 0xF9, 0xF1),
+ "honeysuckle": (0xED, 0xFC, 0x84),
+ "lavender magenta": (0xEE, 0x82, 0xEE),
+ "beauty bush": (0xEE, 0xC1, 0xBE),
+ "chalky": (0xEE, 0xD7, 0x94),
+ "almond": (0xEE, 0xD9, 0xC4),
+ "flax": (0xEE, 0xDC, 0x82),
+ "bizarre": (0xEE, 0xDE, 0xDA),
+ "double colonial white": (0xEE, 0xE3, 0xAD),
+ "cararra": (0xEE, 0xEE, 0xE8),
+ "manz": (0xEE, 0xEF, 0x78),
+ "tahuna sands": (0xEE, 0xF0, 0xC8),
+ "athens gray": (0xEE, 0xF0, 0xF3),
+ "tusk": (0xEE, 0xF3, 0xC3),
+ "loafer": (0xEE, 0xF4, 0xDE),
+ "catskill white": (0xEE, 0xF6, 0xF7),
+ "twilight blue": (0xEE, 0xFD, 0xFF),
+ "jonquil": (0xEE, 0xFF, 0x9A),
+ "rice flower": (0xEE, 0xFF, 0xE2),
+ "jaffa": (0xEF, 0x86, 0x3F),
+ "gallery": (0xEF, 0xEF, 0xEF),
+ "porcelain": (0xEF, 0xF2, 0xF3),
+ "mauvelous": (0xF0, 0x91, 0xA9),
+ "golden dream": (0xF0, 0xD5, 0x2D),
+ "golden sand": (0xF0, 0xDB, 0x7D),
+ "buff": (0xF0, 0xDC, 0x82),
+ "prim": (0xF0, 0xE2, 0xEC),
+ "khaki": (0xF0, 0xE6, 0x8C),
+ "selago": (0xF0, 0xEE, 0xFD),
+ "titan white": (0xF0, 0xEE, 0xFF),
+ "alice blue": (0xF0, 0xF8, 0xFF),
+ "feta": (0xF0, 0xFC, 0xEA),
+ "gold drop": (0xF1, 0x82, 0x00),
+ "wewak": (0xF1, 0x9B, 0xAB),
+ "sahara sand": (0xF1, 0xE7, 0x88),
+ "parchment": (0xF1, 0xE9, 0xD2),
+ "blue chalk": (0xF1, 0xE9, 0xFF),
+ "mint julep": (0xF1, 0xEE, 0xC1),
+ "seashell": (0xF1, 0xF1, 0xF1),
+ "saltpan": (0xF1, 0xF7, 0xF2),
+ "tidal": (0xF1, 0xFF, 0xAD),
+ "chiffon": (0xF1, 0xFF, 0xC8),
+ "flamingo": (0xF2, 0x55, 0x2A),
+ "tangerine": (0xF2, 0x85, 0x00),
+ "mandys pink": (0xF2, 0xC3, 0xB2),
+ "concrete": (0xF2, 0xF2, 0xF2),
+ "black squeeze": (0xF2, 0xFA, 0xFA),
+ "pomegranate": (0xF3, 0x47, 0x23),
+ "buttercup": (0xF3, 0xAD, 0x16),
+ "new orleans": (0xF3, 0xD6, 0x9D),
+ "vanilla ice": (0xF3, 0xD9, 0xDF),
+ "sidecar": (0xF3, 0xE7, 0xBB),
+ "dawn pink": (0xF3, 0xE9, 0xE5),
+ "wheatfield": (0xF3, 0xED, 0xCF),
+ "canary": (0xF3, 0xFB, 0x62),
+ "orinoco": (0xF3, 0xFB, 0xD4),
+ "carla": (0xF3, 0xFF, 0xD8),
+ "hollywood cerise": (0xF4, 0x00, 0xA1),
+ "sandy brown": (0xF4, 0xA4, 0x60),
+ "saffron": (0xF4, 0xC4, 0x30),
+ "ripe lemon": (0xF4, 0xD8, 0x1C),
+ "janna": (0xF4, 0xEB, 0xD3),
+ "pampas": (0xF4, 0xF2, 0xEE),
+ "wild sand": (0xF4, 0xF4, 0xF4),
+ "zircon": (0xF4, 0xF8, 0xFF),
+ "froly": (0xF5, 0x75, 0x84),
+ "cream can": (0xF5, 0xC8, 0x5C),
+ "manhattan": (0xF5, 0xC9, 0x99),
+ "maize": (0xF5, 0xD5, 0xA0),
+ "wheat": (0xF5, 0xDE, 0xB3),
+ "sandwisp": (0xF5, 0xE7, 0xA2),
+ "pot pourri": (0xF5, 0xE7, 0xE2),
+ "albescent white": (0xF5, 0xE9, 0xD3),
+ "soft peach": (0xF5, 0xED, 0xEF),
+ "ecru white": (0xF5, 0xF3, 0xE5),
+ "beige": (0xF5, 0xF5, 0xDC),
+ "golden fizz": (0xF5, 0xFB, 0x3D),
+ "australian mint": (0xF5, 0xFF, 0xBE),
+ "french rose": (0xF6, 0x4A, 0x8A),
+ "brilliant rose": (0xF6, 0x53, 0xA6),
+ "illusion": (0xF6, 0xA4, 0xC9),
+ "merino": (0xF6, 0xF0, 0xE6),
+ "black haze": (0xF6, 0xF7, 0xF7),
+ "spring sun": (0xF6, 0xFF, 0xDC),
+ "violet red": (0xF7, 0x46, 0x8A),
+ "chilean fire": (0xF7, 0x77, 0x03),
+ "persian pink": (0xF7, 0x7F, 0xBE),
+ "rajah": (0xF7, 0xB6, 0x68),
+ "azalea": (0xF7, 0xC8, 0xDA),
+ "we peep": (0xF7, 0xDB, 0xE6),
+ "quarter spanish white": (0xF7, 0xF2, 0xE1),
+ "whisper": (0xF7, 0xF5, 0xFA),
+ "snow drift": (0xF7, 0xFA, 0xF7),
+ "casablanca": (0xF8, 0xB8, 0x53),
+ "chantilly": (0xF8, 0xC3, 0xDF),
+ "cherub": (0xF8, 0xD9, 0xE9),
+ "marzipan": (0xF8, 0xDB, 0x9D),
+ "energy yellow": (0xF8, 0xDD, 0x5C),
+ "givry": (0xF8, 0xE4, 0xBF),
+ "white linen": (0xF8, 0xF0, 0xE8),
+ "magnolia": (0xF8, 0xF4, 0xFF),
+ "spring wood": (0xF8, 0xF6, 0xF1),
+ "coconut cream": (0xF8, 0xF7, 0xDC),
+ "white lilac": (0xF8, 0xF7, 0xFC),
+ "desert storm": (0xF8, 0xF8, 0xF7),
+ "texas": (0xF8, 0xF9, 0x9C),
+ "corn field": (0xF8, 0xFA, 0xCD),
+ "mimosa": (0xF8, 0xFD, 0xD3),
+ "carnation": (0xF9, 0x5A, 0x61),
+ "saffron mango": (0xF9, 0xBF, 0x58),
+ "carousel pink": (0xF9, 0xE0, 0xED),
+ "dairy cream": (0xF9, 0xE4, 0xBC),
+ "portica": (0xF9, 0xE6, 0x63),
+ "amour": (0xF9, 0xEA, 0xF3),
+ "rum swizzle": (0xF9, 0xF8, 0xE4),
+ "dolly": (0xF9, 0xFF, 0x8B),
+ "sugar cane": (0xF9, 0xFF, 0xF6),
+ "ecstasy": (0xFA, 0x78, 0x14),
+ "tan hide": (0xFA, 0x9D, 0x5A),
+ "corvette": (0xFA, 0xD3, 0xA2),
+ "peach yellow": (0xFA, 0xDF, 0xAD),
+ "turbo": (0xFA, 0xE6, 0x00),
+ "astra": (0xFA, 0xEA, 0xB9),
+ "champagne": (0xFA, 0xEC, 0xCC),
+ "linen": (0xFA, 0xF0, 0xE6),
+ "fantasy": (0xFA, 0xF3, 0xF0),
+ "citrine white": (0xFA, 0xF7, 0xD6),
+ "alabaster": (0xFA, 0xFA, 0xFA),
+ "hint of yellow": (0xFA, 0xFD, 0xE4),
+ "milan": (0xFA, 0xFF, 0xA4),
+ "brink pink": (0xFB, 0x60, 0x7F),
+ "geraldine": (0xFB, 0x89, 0x89),
+ "lavender rose": (0xFB, 0xA0, 0xE3),
+ "sea buckthorn": (0xFB, 0xA1, 0x29),
+ "sun": (0xFB, 0xAC, 0x13),
+ "lavender pink": (0xFB, 0xAE, 0xD2),
+ "rose bud": (0xFB, 0xB2, 0xA3),
+ "cupid": (0xFB, 0xBE, 0xDA),
+ "classic rose": (0xFB, 0xCC, 0xE7),
+ "apricot peach": (0xFB, 0xCE, 0xB1),
+ "banana mania": (0xFB, 0xE7, 0xB2),
+ "marigold yellow": (0xFB, 0xE8, 0x70),
+ "festival": (0xFB, 0xE9, 0x6C),
+ "sweet corn": (0xFB, 0xEA, 0x8C),
+ "candy corn": (0xFB, 0xEC, 0x5D),
+ "hint of red": (0xFB, 0xF9, 0xF9),
+ "shalimar": (0xFB, 0xFF, 0xBA),
+ "shocking pink": (0xFC, 0x0F, 0xC0),
+ "tickle me pink": (0xFC, 0x80, 0xA5),
+ "tree poppy": (0xFC, 0x9C, 0x1D),
+ "lightning yellow": (0xFC, 0xC0, 0x1E),
+ "goldenrod": (0xFC, 0xD6, 0x67),
+ "candlelight": (0xFC, 0xD9, 0x17),
+ "cherokee": (0xFC, 0xDA, 0x98),
+ "double pearl lusta": (0xFC, 0xF4, 0xD0),
+ "pearl lusta": (0xFC, 0xF4, 0xDC),
+ "vista white": (0xFC, 0xF8, 0xF7),
+ "bianca": (0xFC, 0xFB, 0xF3),
+ "moon glow": (0xFC, 0xFE, 0xDA),
+ "china ivory": (0xFC, 0xFF, 0xE7),
+ "ceramic": (0xFC, 0xFF, 0xF9),
+ "torch red": (0xFD, 0x0E, 0x35),
+ "wild watermelon": (0xFD, 0x5B, 0x78),
+ "crusta": (0xFD, 0x7B, 0x33),
+ "sorbus": (0xFD, 0x7C, 0x07),
+ "sweet pink": (0xFD, 0x9F, 0xA2),
+ "light apricot": (0xFD, 0xD5, 0xB1),
+ "pig pink": (0xFD, 0xD7, 0xE4),
+ "cinderella": (0xFD, 0xE1, 0xDC),
+ "golden glow": (0xFD, 0xE2, 0x95),
+ "lemon": (0xFD, 0xE9, 0x10),
+ "old lace": (0xFD, 0xF5, 0xE6),
+ "half colonial white": (0xFD, 0xF6, 0xD3),
+ "drover": (0xFD, 0xF7, 0xAD),
+ "pale prim": (0xFD, 0xFE, 0xB8),
+ "cumulus": (0xFD, 0xFF, 0xD5),
+ "persian rose": (0xFE, 0x28, 0xA2),
+ "sunset orange": (0xFE, 0x4C, 0x40),
+ "bittersweet": (0xFE, 0x6F, 0x5E),
+ "california": (0xFE, 0x9D, 0x04),
+ "yellow sea": (0xFE, 0xA9, 0x04),
+ "melon": (0xFE, 0xBA, 0xAD),
+ "bright sun": (0xFE, 0xD3, 0x3C),
+ "dandelion": (0xFE, 0xD8, 0x5D),
+ "salomie": (0xFE, 0xDB, 0x8D),
+ "cape honey": (0xFE, 0xE5, 0xAC),
+ "remy": (0xFE, 0xEB, 0xF3),
+ "oasis": (0xFE, 0xEF, 0xCE),
+ "bridesmaid": (0xFE, 0xF0, 0xEC),
+ "beeswax": (0xFE, 0xF2, 0xC7),
+ "bleach white": (0xFE, 0xF3, 0xD8),
+ "pipi": (0xFE, 0xF4, 0xCC),
+ "half spanish white": (0xFE, 0xF4, 0xDB),
+ "wisp pink": (0xFE, 0xF4, 0xF8),
+ "provincial pink": (0xFE, 0xF5, 0xF1),
+ "half dutch white": (0xFE, 0xF7, 0xDE),
+ "solitaire": (0xFE, 0xF8, 0xE2),
+ "white pointer": (0xFE, 0xF8, 0xFF),
+ "off yellow": (0xFE, 0xF9, 0xE3),
+ "orange white": (0xFE, 0xFC, 0xED),
+ "red": (0xFF, 0x00, 0x00),
+ "dark red": (0x64, 0x00, 0x00),
+ "rose": (0xFF, 0x00, 0x7F),
+ "purple pizzazz": (0xFF, 0x00, 0xCC),
+ "magenta": (0xFF, 0x00, 0xFF),
+ "fuchsia": (0xFF, 0x00, 0xFF),
+ "dark magenta": (0xAF, 0x00, 0xAF),
+ "scarlet": (0xFF, 0x24, 0x00),
+ "wild strawberry": (0xFF, 0x33, 0x99),
+ "razzle dazzle rose": (0xFF, 0x33, 0xCC),
+ "radical red": (0xFF, 0x35, 0x5E),
+ "red orange": (0xFF, 0x3F, 0x34),
+ "coral red": (0xFF, 0x40, 0x40),
+ "vermilion": (0xFF, 0x4D, 0x00),
+ "international orange": (0xFF, 0x4F, 0x00),
+ "outrageous orange": (0xFF, 0x60, 0x37),
+ "blaze orange": (0xFF, 0x66, 0x00),
+ "pink flamingo": (0xFF, 0x66, 0xFF),
+ "orange": (0xFF, 0x68, 0x1F),
+ "hot pink": (0xFF, 0x69, 0xB4),
+ "persimmon": (0xFF, 0x6B, 0x53),
+ "blush pink": (0xFF, 0x6F, 0xFF),
+ "burning orange": (0xFF, 0x70, 0x34),
+ "pumpkin": (0xFF, 0x75, 0x18),
+ "flamenco": (0xFF, 0x7D, 0x07),
+ "flush orange": (0xFF, 0x7F, 0x00),
+ "coral": (0xFF, 0x7F, 0x50),
+ "salmon": (0xFF, 0x8C, 0x69),
+ "pizazz": (0xFF, 0x90, 0x00),
+ "west side": (0xFF, 0x91, 0x0F),
+ "pink salmon": (0xFF, 0x91, 0xA4),
+ "neon carrot": (0xFF, 0x99, 0x33),
+ "atomic tangerine": (0xFF, 0x99, 0x66),
+ "vivid tangerine": (0xFF, 0x99, 0x80),
+ "sunshade": (0xFF, 0x9E, 0x2C),
+ "orange peel": (0xFF, 0xA0, 0x00),
+ "mona lisa": (0xFF, 0xA1, 0x94),
+ "web orange": (0xFF, 0xA5, 0x00),
+ "carnation pink": (0xFF, 0xA6, 0xC9),
+ "hit pink": (0xFF, 0xAB, 0x81),
+ "yellow orange": (0xFF, 0xAE, 0x42),
+ "cornflower lilac": (0xFF, 0xB0, 0xAC),
+ "sundown": (0xFF, 0xB1, 0xB3),
+ "my sin": (0xFF, 0xB3, 0x1F),
+ "texas rose": (0xFF, 0xB5, 0x55),
+ "cotton candy": (0xFF, 0xB7, 0xD5),
+ "macaroni and cheese": (0xFF, 0xB9, 0x7B),
+ "selective yellow": (0xFF, 0xBA, 0x00),
+ "koromiko": (0xFF, 0xBD, 0x5F),
+ "amber": (0xFF, 0xBF, 0x00),
+ "wax flower": (0xFF, 0xC0, 0xA8),
+ "pink": (0xFF, 0xC0, 0xCB),
+ "your pink": (0xFF, 0xC3, 0xC0),
+ "supernova": (0xFF, 0xC9, 0x01),
+ "flesh": (0xFF, 0xCB, 0xA4),
+ "sunglow": (0xFF, 0xCC, 0x33),
+ "golden tainoi": (0xFF, 0xCC, 0x5C),
+ "peach orange": (0xFF, 0xCC, 0x99),
+ "chardonnay": (0xFF, 0xCD, 0x8C),
+ "pastel pink": (0xFF, 0xD1, 0xDC),
+ "romantic": (0xFF, 0xD2, 0xB7),
+ "grandis": (0xFF, 0xD3, 0x8C),
+ "gold": (0xFF, 0xD7, 0x00),
+ "school bus yellow": (0xFF, 0xD8, 0x00),
+ "cosmos": (0xFF, 0xD8, 0xD9),
+ "mustard": (0xFF, 0xDB, 0x58),
+ "peach schnapps": (0xFF, 0xDC, 0xD6),
+ "caramel": (0xFF, 0xDD, 0xAF),
+ "tuft bush": (0xFF, 0xDD, 0xCD),
+ "watusi": (0xFF, 0xDD, 0xCF),
+ "pink lace": (0xFF, 0xDD, 0xF4),
+ "navajo white": (0xFF, 0xDE, 0xAD),
+ "frangipani": (0xFF, 0xDE, 0xB3),
+ "pippin": (0xFF, 0xE1, 0xDF),
+ "pale rose": (0xFF, 0xE1, 0xF2),
+ "negroni": (0xFF, 0xE2, 0xC5),
+ "cream brulee": (0xFF, 0xE5, 0xA0),
+ "peach": (0xFF, 0xE5, 0xB4),
+ "tequila": (0xFF, 0xE6, 0xC7),
+ "kournikova": (0xFF, 0xE7, 0x72),
+ "sandy beach": (0xFF, 0xEA, 0xC8),
+ "karry": (0xFF, 0xEA, 0xD4),
+ "broom": (0xFF, 0xEC, 0x13),
+ "colonial white": (0xFF, 0xED, 0xBC),
+ "derby": (0xFF, 0xEE, 0xD8),
+ "vis vis": (0xFF, 0xEF, 0xA1),
+ "egg white": (0xFF, 0xEF, 0xC1),
+ "papaya whip": (0xFF, 0xEF, 0xD5),
+ "fair pink": (0xFF, 0xEF, 0xEC),
+ "peach cream": (0xFF, 0xF0, 0xDB),
+ "lavender blush": (0xFF, 0xF0, 0xF5),
+ "gorse": (0xFF, 0xF1, 0x4F),
+ "buttermilk": (0xFF, 0xF1, 0xB5),
+ "pink lady": (0xFF, 0xF1, 0xD8),
+ "forget me not": (0xFF, 0xF1, 0xEE),
+ "tutu": (0xFF, 0xF1, 0xF9),
+ "picasso": (0xFF, 0xF3, 0x9D),
+ "chardon": (0xFF, 0xF3, 0xF1),
+ "paris daisy": (0xFF, 0xF4, 0x6E),
+ "barley white": (0xFF, 0xF4, 0xCE),
+ "egg sour": (0xFF, 0xF4, 0xDD),
+ "sazerac": (0xFF, 0xF4, 0xE0),
+ "serenade": (0xFF, 0xF4, 0xE8),
+ "chablis": (0xFF, 0xF4, 0xF3),
+ "seashell peach": (0xFF, 0xF5, 0xEE),
+ "sauvignon": (0xFF, 0xF5, 0xF3),
+ "milk punch": (0xFF, 0xF6, 0xD4),
+ "varden": (0xFF, 0xF6, 0xDF),
+ "rose white": (0xFF, 0xF6, 0xF5),
+ "baja white": (0xFF, 0xF8, 0xD1),
+ "gin fizz": (0xFF, 0xF9, 0xE2),
+ "early dawn": (0xFF, 0xF9, 0xE6),
+ "lemon chiffon": (0xFF, 0xFA, 0xCD),
+ "bridal heath": (0xFF, 0xFA, 0xF4),
+ "scotch mist": (0xFF, 0xFB, 0xDC),
+ "soapstone": (0xFF, 0xFB, 0xF9),
+ "witch haze": (0xFF, 0xFC, 0x99),
+ "buttery white": (0xFF, 0xFC, 0xEA),
+ "island spice": (0xFF, 0xFC, 0xEE),
+ "cream": (0xFF, 0xFD, 0xD0),
+ "chilean heath": (0xFF, 0xFD, 0xE6),
+ "travertine": (0xFF, 0xFD, 0xE8),
+ "orchid white": (0xFF, 0xFD, 0xF3),
+ "quarter pearl lusta": (0xFF, 0xFD, 0xF4),
+ "half and half": (0xFF, 0xFE, 0xE1),
+ "apricot white": (0xFF, 0xFE, 0xEC),
+ "rice cake": (0xFF, 0xFE, 0xF0),
+ "black white": (0xFF, 0xFE, 0xF6),
+ "romance": (0xFF, 0xFE, 0xFD),
+ "yellow": (0xFF, 0xFF, 0x00),
+ "laser lemon": (0xFF, 0xFF, 0x66),
+ "pale canary": (0xFF, 0xFF, 0x99),
+ "portafino": (0xFF, 0xFF, 0xB4),
+ "ivory": (0xFF, 0xFF, 0xF0),
+ "white": (0xFF, 0xFF, 0xFF),
+}
+
+
def clear() -> str:
    """Return the ANSI sequence that homes the cursor and clears the screen."""
    # Bug fix: "\e" is not a recognized Python escape (the string held a
    # literal backslash-e, not ESC); spell ESC as "\033".
    return "\033[H\033[2J"
+
+
def clear_screen() -> str:
    """Alias of clear(): home the cursor and clear the screen."""
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return "\033[H\033[2J"
+
+
def reset() -> str:
    """Return the SGR sequence that resets all text attributes."""
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return "\033[m"
+
+
def normal() -> str:
    """Alias of reset(): return the SGR sequence that resets attributes."""
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return "\033[m"
+
+
def bold() -> str:
    """Return the SGR sequence that starts bold text."""
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return "\033[1m"
+
+
def italic() -> str:
    """Return the SGR sequence that starts italic text."""
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return "\033[3m"
+
+
def italics() -> str:
    """Alias of italic(): return the SGR sequence that starts italics."""
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return "\033[3m"
+
+
def underline() -> str:
    """Return the SGR sequence that starts underlined text."""
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return "\033[4m"
+
+
def strikethrough() -> str:
    """Return the SGR sequence that starts struck-through text."""
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return "\033[9m"
+
+
def strike_through() -> str:
    """Alias of strikethrough(): SGR sequence for struck-through text."""
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return "\033[9m"
+
+
def is_16color(num: int) -> bool:
    """Return True if this channel value (128 or 255) is representable
    in the classic 16-color ANSI palette."""
    return num in (128, 255)
+
+
def is_216color(num: int) -> bool:
    """Return True if this channel value is one of the exact levels of
    the xterm 6x6x6 color cube (0x00, 0x5F, 0x87, 0xAF, 0xD7, 0xFF).

    Bug fix: the set previously contained 223 instead of 215 (0xD7),
    which is the actual fifth cube level per the 8-bit ANSI palette.
    """
    return num in {0, 95, 135, 175, 215, 255}
+
+
+def _simple_color_number(red: int, green: int, blue: int) -> int:
+ r = red > 0
+ g = green > 0
+ b = blue > 0
+ return b << 2 | g << 1 | r
+
+
def fg_16color(red: int, green: int, blue: int) -> str:
    """Set the foreground using the classic 16-color palette.

    Each nonzero channel contributes its primary to the base code
    (SGR 30-37); when two or more channels exceed 128 the bright
    variant (SGR 90-97) is selected instead.
    """
    code = _simple_color_number(red, green, blue) + 30
    bright_count = 0
    if red > 128:
        bright_count += 1
    if green > 128:
        bright_count += 1
    if blue > 128:
        bright_count += 1
    if bright_count > 1:
        code += 60
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return f"\033[{code}m"
+
+
def bg_16color(red: int, green: int, blue: int) -> str:
    """Set the background using the classic 16-color palette.

    Each nonzero channel contributes its primary to the base code
    (SGR 40-47); when two or more channels exceed 128 the bright
    variant (SGR 100-107) is selected instead.
    """
    code = _simple_color_number(red, green, blue) + 40
    bright_count = 0
    if red > 128:
        bright_count += 1
    if green > 128:
        bright_count += 1
    if blue > 128:
        bright_count += 1
    if bright_count > 1:
        code += 60
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return f"\033[{code}m"
+
+
+def _pixel_to_216color(n: int) -> int:
+ if n >= 255:
+ return 5
+ if n >= 233:
+ return 4
+ if n >= 175:
+ return 3
+ if n >= 135:
+ return 2
+ if n >= 95:
+ return 1
+ return 0
+
+
def fg_216color(red: int, green: int, blue: int) -> str:
    """Set the foreground using the 8-bit palette's 6x6x6 color cube
    (SGR 38;5;N with N in 16..231)."""
    r = _pixel_to_216color(red)
    g = _pixel_to_216color(green)
    b = _pixel_to_216color(blue)
    code = 16 + r * 36 + g * 6 + b
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return f"\033[38;5;{code}m"
+
+
def bg_216color(red: int, green: int, blue: int) -> str:
    """Set the background using the 8-bit palette's 6x6x6 color cube
    (SGR 48;5;N with N in 16..231)."""
    r = _pixel_to_216color(red)
    g = _pixel_to_216color(green)
    b = _pixel_to_216color(blue)
    code = 16 + r * 36 + g * 6 + b
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return f"\033[48;5;{code}m"
+
+
def fg_24bit(red: int, green: int, blue: int) -> str:
    """Set the foreground to an exact 24-bit truecolor (SGR 38;2;r;g;b)."""
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return f"\033[38;2;{red};{green};{blue}m"
+
+
def bg_24bit(red: int, green: int, blue: int) -> str:
    """Set the background to an exact 24-bit truecolor (SGR 48;2;r;g;b)."""
    # Bug fix: use a real ESC byte ("\033"); "\e" is not a Python escape.
    return f"\033[48;2;{red};{green};{blue}m"
+
+
def _find_color_by_name(name: str) -> Tuple[int, int, int]:
    """Look up a color name's RGB triple, falling back to fuzzy
    matching against the known color names on an exact-lookup miss."""
    rgb = COLOR_NAMES_TO_RGB.get(name.lower())
    if rgb is None:
        closest = guess_name(name)
        rgb = COLOR_NAMES_TO_RGB.get(closest.lower())
    assert rgb is not None
    return rgb
+
+
def fg(name: Optional[str] = "",
       red: Optional[int] = None,
       green: Optional[int] = None,
       blue: Optional[int] = None,
       *,
       force_16color: bool = False,
       force_216color: bool = False) -> str:
    """Return an ANSI sequence that sets the foreground color.

    The color may be given either by name (fuzzy-matched against the
    known color table) or as explicit red/green/blue components; any
    missing component defaults to 0.  The palette used (16-color,
    216-color cube or 24-bit) is chosen from the channel values unless
    overridden via force_16color / force_216color.
    """
    if name is not None and string_utils.is_full_string(name):
        r, g, b = _find_color_by_name(name)
        return fg(None, r, g, b,
                  force_16color=force_16color,
                  force_216color=force_216color)

    red = 0 if red is None else red
    green = 0 if green is None else green
    blue = 0 if blue is None else blue
    if force_16color or (
            is_16color(red) and is_16color(green) and is_16color(blue)
    ):
        logger.debug("Using 16-color strategy")
        return fg_16color(red, green, blue)
    if force_216color or (
            is_216color(red) and is_216color(green) and is_216color(blue)
    ):
        logger.debug("Using 216-color strategy")
        return fg_216color(red, green, blue)
    logger.debug("Using 24-bit color strategy")
    return fg_24bit(red, green, blue)
+
+
+def _rgb_to_yiq(rgb: Tuple[int, int, int]) -> int:
+ return (rgb[0] * 299 + rgb[1] * 587 + rgb[2] * 114) // 1000
+
+
+def _contrast(rgb: Tuple[int, int, int]) -> Tuple[int, int, int]:
+ if _rgb_to_yiq(rgb) < 128:
+ return (0xff, 0xff, 0xff)
+ return (0, 0, 0)
+
+
def pick_contrasting_color(name: Optional[str] = "",
                           red: Optional[int] = None,
                           green: Optional[int] = None,
                           blue: Optional[int] = None) -> Tuple[int, int, int]:
    """Return black or white, whichever contrasts best with the given
    color.  The color is named (fuzzy-matched against the known table)
    or given as RGB components; missing components default to 0."""
    if name is not None and string_utils.is_full_string(name):
        rgb = _find_color_by_name(name)
    else:
        rgb = (
            red if red is not None else 0,
            green if green is not None else 0,
            blue if blue is not None else 0,
        )
    return _contrast(rgb)
+
+
def guess_name(name: str) -> str:
    """Return the known color name most similar to the given name,
    scored with difflib's SequenceMatcher ratio (ties go to the first
    candidate in table order)."""
    best_guess = max(
        COLOR_NAMES_TO_RGB,
        key=lambda candidate: difflib.SequenceMatcher(
            None, name, candidate
        ).ratio(),
    )
    logger.debug(f"Best guess at color name is {best_guess}")
    return best_guess
+
+
def bg(name: Optional[str] = "",
       red: Optional[int] = None,
       green: Optional[int] = None,
       blue: Optional[int] = None,
       *,
       force_16color: bool = False,
       force_216color: bool = False) -> str:
    """Return an ANSI sequence that sets the background color.

    Mirrors fg(): the color may be named (fuzzy-matched) or given as
    red/green/blue components (missing ones default to 0), and the
    narrowest suitable palette is chosen unless a force_* keyword
    overrides the selection.
    """
    if name is not None and string_utils.is_full_string(name):
        r, g, b = _find_color_by_name(name)
        return bg(None, r, g, b,
                  force_16color=force_16color,
                  force_216color=force_216color)

    red = 0 if red is None else red
    green = 0 if green is None else green
    blue = 0 if blue is None else blue
    if force_16color or (
            is_16color(red) and is_16color(green) and is_16color(blue)
    ):
        logger.debug("Using 16-color strategy")
        return bg_16color(red, green, blue)
    if force_216color or (
            is_216color(red) and is_216color(green) and is_216color(blue)
    ):
        logger.debug("Using 216-color strategy")
        return bg_216color(red, green, blue)
    logger.debug("Using 24-bit color strategy")
    return bg_24bit(red, green, blue)
+
+
def main() -> None:
    """Preview every known color whose name contains the argv substring,
    printing each once as foreground and once as background, paired
    with its contrasting color."""
    query = " ".join(sys.argv[1:])
    for candidate in COLOR_NAMES_TO_RGB:
        if query not in candidate:
            continue
        fore = fg(candidate)
        back = bg(candidate)
        contrast = pick_contrasting_color(candidate)
        contrast_fore = fg(None, contrast[0], contrast[1], contrast[2])
        contrast_back = bg(None, contrast[0], contrast[1], contrast[2])
        print(f'{fore}{contrast_back}{candidate}{reset()}\t\t\t'
              f'{back}{contrast_fore}{candidate}{reset()}')
+
+
# Run the color-preview demo when this module is executed as a script.
if __name__ == '__main__':
    main()
--- /dev/null
+#!/usr/bin/python3
+
+import argparse
+import logging
+import os
+
+import string_utils
+
+logger = logging.getLogger(__name__)
+
+
class ActionNoYes(argparse.Action):
    """An argparse Action that registers a --foo / --no_foo flag pair.

    Registering ``--foo`` with this action also registers ``--no_foo``:
    the former stores True into the destination, the latter False.  A
    non-None default must be supplied explicitly since a yes/no flag is
    meaningless without one.
    """

    def __init__(
        self,
        option_strings,
        dest,
        default=None,
        required=False,
        help=None
    ):
        # Refuse to guess a default for a boolean pair.
        if default is None:
            msg = 'You must provide a default with Yes/No action'
            logger.critical(msg)
            raise ValueError(msg)
        # We synthesize the "no" variant ourselves, so callers must
        # register exactly one long option.
        if len(option_strings) != 1:
            msg = 'Only single argument is allowed with YesNo action'
            logger.critical(msg)
            raise ValueError(msg)
        opt = option_strings[0]
        if not opt.startswith('--'):
            msg = 'Yes/No arguments must be prefixed with --'
            logger.critical(msg)
            raise ValueError(msg)

        opt = opt[2:]
        opts = ['--' + opt, '--no_' + opt]
        super().__init__(
            opts,
            dest,
            nargs=0,
            const=None,
            default=default,
            required=required,
            help=help
        )

    def __call__(self, parser, namespace, values, option_string=None):
        """Store False for the --no-* / --no_* variant, True otherwise.

        Bug fix: the parameter was previously misnamed option_strings
        (plural) and had no default; argparse's Action API passes the
        single option_string actually typed, which may be None, and the
        old code would have raised AttributeError in that case.
        """
        if option_string is not None and (
            option_string.startswith('--no-') or
            option_string.startswith('--no_')
        ):
            setattr(namespace, self.dest, False)
        else:
            setattr(namespace, self.dest, True)
+
+
def valid_bool(v):
    """argparse helper: coerce v to a bool, passing real bools through
    unchanged and parsing anything else via string_utils.to_bool."""
    return v if isinstance(v, bool) else string_utils.to_bool(v)
+
+
def valid_ip(ip: str) -> str:
    """argparse type checker: return the IPv4 address extracted from ip.

    Raises:
        argparse.ArgumentTypeError: if no valid IPv4 address is found.
    """
    address = string_utils.extract_ip_v4(ip.strip())
    if address is None:
        msg = f"{ip} is an invalid IP address"
        logger.warning(msg)
        raise argparse.ArgumentTypeError(msg)
    return address
+
+
def valid_mac(mac: str) -> str:
    """argparse type checker: return the MAC address extracted from mac.

    Raises:
        argparse.ArgumentTypeError: if no valid MAC address is found.
    """
    address = string_utils.extract_mac_address(mac)
    if address is None:
        msg = f"{mac} is an invalid MAC address"
        logger.warning(msg)
        raise argparse.ArgumentTypeError(msg)
    return address
+
+
def valid_percentage(num: str) -> float:
    """argparse type checker: parse num as a float in [0.0, 100.0].

    Raises:
        argparse.ArgumentTypeError: if the value is out of range.
    """
    value = float(num)
    if not (0.0 <= value <= 100.0):
        msg = f"{num} is an invalid percentage; expected 0 <= n <= 100.0"
        logger.warning(msg)
        raise argparse.ArgumentTypeError(msg)
    return value
+
+
def valid_filename(filename: str) -> str:
    """argparse type checker: return the stripped filename iff it names
    an existing path on disk.

    Raises:
        argparse.ArgumentTypeError: if the path does not exist.
    """
    s = filename.strip()
    if os.path.exists(s):
        return s
    # Bug fix: the f-string had no placeholder, so the error always read
    # "(unknown) was not found..." instead of naming the bad path.
    msg = f"{filename} was not found and is therefore invalid."
    logger.warning(msg)
    raise argparse.ArgumentTypeError(msg)
--- /dev/null
+#!/usr/bin/env python3
+
+import functools
+import logging
+import os
+import sys
+import time
+import traceback
+
+import argparse_utils
+import config
+import logging_utils
+
+
+logger = logging.getLogger(__name__)
+
# Module-import-time registration of this module's command-line flags
# with the project-wide config system; the flags take effect once
# config.parse() runs (see initialize() below).
args = config.add_commandline_args(
    f'Bootstrap ({__file__})',
    'Args related to python program bootstrapper and Swiss army knife')
# Yes/No flag pair (--debug_unhandled_exceptions / --no_...); consumed
# by handle_uncaught_exception() via config.config.
args.add_argument(
    '--debug_unhandled_exceptions',
    action=argparse_utils.ActionNoYes,
    default=False,
    help='Break into debugger on top level unhandled exceptions for interactive debugging'
)
+
+
def handle_uncaught_exception(exc_type, exc_value, exc_traceback):
    """sys.excepthook replacement: log any uncaught top-level exception,
    print its traceback and, when --debug_unhandled_exceptions was
    given, drop into the interactive debugger.  KeyboardInterrupt is
    handed straight to the default hook so Ctrl-C behaves normally."""
    if issubclass(exc_type, KeyboardInterrupt):
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
        return
    info = (exc_type, exc_value, exc_traceback)
    logger.exception(f'Unhandled top level {exc_type}', exc_info=info)
    traceback.print_exception(*info)
    if config.config['debug_unhandled_exceptions']:
        logger.info("Invoking the debugger...")
        breakpoint()
+
+
def initialize(funct):
    """Remember to initialize config and logging before running main."""
    @functools.wraps(funct)
    def initialize_wrapper(*args, **kwargs):
        # Wire up crash handling, global config and logging before
        # handing control to the wrapped main function.
        sys.excepthook = handle_uncaught_exception
        config.parse()
        logging_utils.initialize_logging(logging.getLogger())
        logger.debug(f"About to invoke {funct}...")
        start = time.perf_counter()
        ret = funct(*args, **kwargs)
        end = time.perf_counter()
        logger.debug(f'{funct} returned {ret}.')
        # Report process resource usage on the way out.
        (utime, stime, cutime, cstime, elapsed_time) = os.times()
        logger.debug(f'\nuser: {utime}s\n'
                     f'system: {stime}s\n'
                     f'child user: {cutime}s\n'
                     f'child system: {cstime}s\n'
                     f'elapsed: {elapsed_time}s\n'
                     f'walltime: {end - start}s\n')
        logger.info(f'Exit {ret}')
        # The wrapper never returns: the program exits with funct's
        # return value as its exit status.
        sys.exit(ret)
    return initialize_wrapper
--- /dev/null
+#!/usr/bin/env python3
+
+"""Utilities for dealing with webcam images."""
+
+import logging
+import platform
+import subprocess
+from typing import NamedTuple, Optional
+
+import cv2 # type: ignore
+import numpy as np
+import requests
+
+import decorator_utils
+
+logger = logging.getLogger(__name__)
+
+
class RawJpgHsv(NamedTuple):
    """Raw image bytes, the jpeg image and the HSV (hue saturation value) image."""
    raw: Optional[bytes]        # undecoded JPEG bytes as fetched
    jpg: Optional[np.ndarray]   # BGR image produced by cv2.imdecode
    hsv: Optional[np.ndarray]   # same image converted via cv2.COLOR_BGR2HSV
+
+
class BlueIrisImageMetadata(NamedTuple):
    """Is a Blue Iris image bad (big grey borders around it) or infrared?"""
    is_bad_image: bool       # True when >33% of pixels are mid-gray (value ~64)
    is_infrared_image: bool  # True when >75% of pixels have zero hue and saturation
+
+
def analyze_blue_iris_image(hsv: np.ndarray) -> BlueIrisImageMetadata:
    """See if a Blue Iris image is bad and infrared.

    Args:
        hsv: an HSV image (rows x cols x 3) as produced by cv2.cvtColor.

    Returns:
        BlueIrisImageMetadata: is_bad_image is True when more than a
        third of the pixels have a value channel within 10 of 64 (the
        mid-gray border color); is_infrared_image is True when more than
        three quarters of the pixels have zero hue and saturation.
    """
    rows, cols, _ = hsv.shape
    num_pixels = rows * cols
    # Vectorized replacement for the old per-pixel Python loop.  The
    # value channel is cast to a signed type first: with uint8
    # arithmetic (pixel - 64) would wrap around instead of going
    # negative and the "gray" test would be wrong for values below 54.
    hue = hsv[..., 0]
    sat = hsv[..., 1]
    val = hsv[..., 2].astype(np.int32)
    hs_zero_count = int(np.count_nonzero((hue == 0) & (sat == 0)))
    gray_count = int(np.count_nonzero(np.abs(val - 64) <= 10))
    logger.debug(f"gray#={gray_count}, hs0#={hs_zero_count}")
    return BlueIrisImageMetadata(
        gray_count > (num_pixels * 0.33), hs_zero_count > (num_pixels * 0.75)
    )
+
+
@decorator_utils.retry_if_none(tries=2, delay_sec=1, backoff=1.1)
def fetch_camera_image_from_video_server(
    camera_name: str, *, width: int = 256, quality: int = 70
) -> Optional[bytes]:
    """Fetch the raw webcam image from the video server."""
    # The video server knows cameras by their bare (domainless) names.
    for domain in (".house", ".cabin"):
        camera_name = camera_name.replace(domain, "")
    url = f"http://10.0.0.56:81/image/{camera_name}?w={width}&q={quality}"
    try:
        response = requests.get(url, stream=False, timeout=10.0)
        if response.ok:
            raw = response.content
            # Decode and sanity check the image before accepting it.
            jpg = cv2.imdecode(np.frombuffer(raw, dtype="uint8"), cv2.IMREAD_COLOR)
            hsv = cv2.cvtColor(jpg, cv2.COLOR_BGR2HSV)
            (is_bad_image, _) = analyze_blue_iris_image(hsv)
            if not is_bad_image:
                logger.debug(f"Got a good image from {url}")
                return raw
    except Exception as e:
        logger.exception(e)
    logger.warning(f"Got a bad image or HTTP error from {url}")
    return None
+
+
def blue_iris_camera_name_to_hostname(camera_name: str) -> str:
    """Map a Blue Iris camera name to the camera's network hostname."""
    mapping = {
        "driveway": "driveway.house",
        "backyard": "backyard.house",
        "frontdoor": "frontdoor.house",
        "cabin_driveway": "driveway.cabin",
    }
    hostname = mapping.get(camera_name, camera_name)
    if "." not in hostname:
        # No domain: assume the camera shares this machine's domain.
        domain = platform.node().split(".")[-1]
        hostname += f".{domain}"
    return hostname
+
+
@decorator_utils.retry_if_none(tries=2, delay_sec=1, backoff=1.1)
def fetch_camera_image_from_rtsp_stream(
    camera_name: str, *, width: int = 256
) -> Optional[bytes]:
    """Fetch the raw webcam image straight from the webcam's RTSP stream."""
    hostname = blue_iris_camera_name_to_hostname(camera_name)
    try:
        # Ask ffmpeg (under a hard external timeout) for a single frame,
        # scaled to the requested width, written as a jpeg to stdout.
        cmd = [
            "/usr/bin/timeout",
            "-k 9s",
            "8s",
            "/usr/local/bin/ffmpeg",
            "-y",
            "-i",
            f"rtsp://camera:IaLaIok@{hostname}:554/live",
            "-f",
            "singlejpeg",
            "-vframes",
            "1",
            "-vf",
            f"scale={width}:-1",
            "-",
        ]
        with subprocess.Popen(
            cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
        ) as ffmpeg:
            image_bytes, _ = ffmpeg.communicate(timeout=10)
            return image_bytes
    except Exception as e:
        logger.exception(e)
        logger.warning("Failed to retrieve image from RTSP stream")
    return None
+
+
@decorator_utils.timeout(seconds=30, use_signals=False)
def _fetch_camera_image(
    camera_name: str, *, width: int = 256, quality: int = 70
) -> RawJpgHsv:
    """Fetch a webcam image given the camera name.

    Tries the video server first, then falls back to reading a frame
    straight from the camera's RTSP stream.  Returns a RawJpgHsv of
    Nones when both methods fail.
    """
    logger.debug("Trying to fetch camera image from video server")
    raw = fetch_camera_image_from_video_server(
        camera_name, width=width, quality=quality
    )
    if raw is None:
        logger.debug(
            "Reading from video server failed; trying direct RTSP stream"
        )
        raw = fetch_camera_image_from_rtsp_stream(camera_name, width=width)
    if raw is not None and len(raw) > 0:
        tmp = np.frombuffer(raw, dtype="uint8")
        jpg = cv2.imdecode(tmp, cv2.IMREAD_COLOR)
        hsv = cv2.cvtColor(jpg, cv2.COLOR_BGR2HSV)
        return RawJpgHsv(
            raw=raw,
            jpg=jpg,
            hsv=hsv,
        )
    # Bug fix: corrected "retieve" typo in the warning message.
    logger.warning(
        "Failed to retrieve image from both video server and direct RTSP stream"
    )
    return RawJpgHsv(None, None, None)
+
+
def fetch_camera_image(
    camera_name: str, *, width: int = 256, quality: int = 70
) -> RawJpgHsv:
    """Like _fetch_camera_image but returns an empty RawJpgHsv on timeout
    instead of raising decorator_utils.TimeoutError."""
    try:
        return _fetch_camera_image(camera_name, width=width, quality=quality)
    except decorator_utils.TimeoutError:
        return RawJpgHsv(None, None, None)
--- /dev/null
+#!/usr/bin/env python3
+
+"""Global configuration driven by commandline arguments (even across
+different modules). Usage:
+
+ module.py:
+ ----------
+ import config
+
+ parser = config.add_commandline_args(
+ "Module",
+ "Args related to module doing the thing.",
+ )
+ parser.add_argument(
+ "--module_do_the_thing",
+ type=bool,
+ default=True,
+ help="Should the module do the thing?"
+ )
+
+ main.py:
+ --------
+ import config
+
+ def main() -> None:
+ parser = config.add_commandline_args(
+ "Main",
+ "A program that does the thing.",
+ )
+ parser.add_argument(
+ "--dry_run",
+ type=bool,
+ default=False,
+ help="Should we really do the thing?"
+ )
+ config.parse() # Very important, this must be invoked!
+
+ If you set this up and remember to invoke config.parse(), all commandline
+ arguments will play nicely together:
+
+ % main.py -h
+ usage: main.py [-h]
+ [--module_do_the_thing MODULE_DO_THE_THING]
+ [--dry_run DRY_RUN]
+
+ Module:
+ Args related to module doing the thing.
+
+ --module_do_the_thing MODULE_DO_THE_THING
+ Should the module do the thing?
+
+ Main:
+ A program that does the thing
+
+ --dry_run
+ Should we really do the thing?
+
+ Arguments themselves should be accessed via config.config['arg_name']. e.g.
+
+ if not config.config['dry_run']:
+ module.do_the_thing()
+"""
+
+import argparse
+import pprint
+import re
+import sys
+from typing import Dict, Any
+
+# Note: at this point in time, logging hasn't been configured and
+# anything we log will come out the root logger.
+
+
class LoadFromFile(argparse.Action):
    """Helper to load a config file into argparse."""

    def __call__(self, parser, namespace, values, option_string=None):
        # The file contains a repr'd dict: {'key': 'value', ...}.
        # Reconstitute an argv list from it and let argparse re-parse.
        with values as f:
            contents = f.read()
        argv = []
        kv_pattern = re.compile(r"^'([a-zA-Z_\-]+)'\s*:\s*(.*)$")
        for piece in contents.split(','):
            piece = piece.strip().strip('{').strip('}')
            m = kv_pattern.match(piece)
            if not m:
                continue
            key = m.group(1)
            value = m.group(2).strip("'")
            # NOTE(review): None/True/False-valued args are dropped here
            # entirely, so boolean flags do not round trip -- confirm.
            if value not in ('None', 'True', 'False'):
                argv.append(f'--{key}')
                argv.append(value)
        parser.parse_args(argv, namespace)
+
+
# A global parser that we will collect arguments into.
args = argparse.ArgumentParser(
    description=f"This program uses config.py ({__file__}) for global, cross-module configuration.",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    fromfile_prefix_chars="@"
)
# Set to True by parse(); queried via has_been_parsed().
config_parse_called = False

# A global configuration dictionary that will contain parsed arguments
# It is also this variable that modules use to access parsed arguments
config: Dict[str, Any] = {}


def add_commandline_args(title: str, description: str = ""):
    """Create a new context for arguments and return a handle."""
    return args.add_argument_group(title, description)


# config.py registers its own flags just like any other module would.
group = add_commandline_args(
    f'Global Config ({__file__})',
    'Args that control the global config itself; how meta!',
)
group.add_argument(
    '--config_loadfile',
    type=open,
    action=LoadFromFile,
    metavar='FILENAME',
    default=None,
    help='Config file from which to read args in lieu or in addition to commandline.',
)
group.add_argument(
    '--config_dump',
    default=False,
    action='store_true',
    help='Display the global configuration on STDERR at program startup.',
)
group.add_argument(
    '--config_savefile',
    type=str,
    metavar='FILENAME',
    default=None,
    help='Populate config file compatible --config_loadfile to save config for later use.',
)
+
+
def parse() -> Dict[str, Any]:
    """Main program should call this early in main()"""
    global config_parse_called
    config_parse_called = True
    # Fold every parsed argument into the global config dict.
    config.update(vars(args.parse_args()))

    savefile = config['config_savefile']
    if savefile:
        # NOTE(review): this writes raw argv tokens, one per line;
        # confirm --config_loadfile actually accepts this format.
        with open(savefile, 'w') as wf:
            wf.write("\n".join(sys.argv[1:]))

    if config['config_dump']:
        dump_config()

    return config
+
+
def has_been_parsed() -> bool:
    """Return True if and only if config.parse() has been invoked."""
    # Read-only access; no need for a global declaration.
    return config_parse_called
+
+
def dump_config():
    """Pretty-print the current global configuration to stderr."""
    print("Global Configuration:", file=sys.stderr)
    pprint.pprint(config, stream=sys.stderr)
--- /dev/null
+#!/usr/bin/env python3
+
+"""Universal constants."""
+
# Date/time based constants
SECONDS_PER_MINUTE = 60
SECONDS_PER_HOUR = 60 * SECONDS_PER_MINUTE
SECONDS_PER_DAY = 24 * SECONDS_PER_HOUR
SECONDS_PER_WEEK = 7 * SECONDS_PER_DAY
MINUTES_PER_HOUR = 60
MINUTES_PER_DAY = 24 * MINUTES_PER_HOUR
MINUTES_PER_WEEK = 7 * MINUTES_PER_DAY
HOURS_PER_DAY = 24
HOURS_PER_WEEK = 7 * HOURS_PER_DAY
DAYS_PER_WEEK = 7

# datetime.date.weekday() numbering: Monday is 0 and Sunday is 6.
# Bug fix: these were all defined as 0 (and Tuesday was misspelled).
DATETIME_WEEKDAY_MONDAY = 0
DATETIME_WEEKDAY_TUESDAY = 1
# Deprecated misspelling kept for backward compatibility.
DATETIME_WEEKDAY_TUEDAY = DATETIME_WEEKDAY_TUESDAY
DATETIME_WEEKDAY_WEDNESDAY = 2
DATETIME_WEEKDAY_THURSDAY = 3
DATETIME_WEEKDAY_FRIDAY = 4
DATETIME_WEEKDAY_SATURDAY = 5
DATETIME_WEEKDAY_SUNDAY = 6
--- /dev/null
+#!/usr/bin/env python3
+
+from numbers import Number
+from typing import Callable
+
+
class Converter(object):
    """Holds conversion logic between a unit and its category's canonical unit."""

    def __init__(self,
                 name: str,
                 category: str,
                 to_canonical: Callable,
                 from_canonical: Callable,
                 unit: str) -> None:
        self.name = name
        self.category = category
        # Bug fix: these callables used to be stored under the same
        # names as the methods below, shadowing (and disabling) them.
        self._to_canonical = to_canonical
        self._from_canonical = from_canonical
        self.unit = unit

    def to_canonical(self, n: Number) -> Number:
        """Convert n (in this unit) into the category's canonical unit."""
        return self._to_canonical(n)

    def from_canonical(self, n: Number) -> Number:
        """Convert n from the canonical unit into this unit."""
        return self._from_canonical(n)

    def unit_suffix(self) -> str:
        """Return the display suffix for this unit (e.g. "°F")."""
        return self.unit
+
+
# Registry of known units.  The canonical unit of the "temperature"
# category is Celsius; each Converter maps to/from it.
conversion_catalog = {
    "Fahrenheit": Converter("Fahrenheit",
                            "temperature",
                            lambda f: (f - 32.0) * 0.55555555,  # ~= 5/9
                            lambda c: c * 1.8 + 32.0,
                            "°F"),
    "Celsius": Converter("Celsius",
                         "temperature",
                         lambda c: c,  # identity: Celsius is canonical
                         lambda c: c,
                         "°C"),
    "Kelvin": Converter("Kelvin",
                        "temperature",
                        lambda k: k - 273.15,
                        lambda c: c + 273.15,
                        "°K"),
}
+
+
def convert(magnitude: Number,
            from_thing: str,
            to_thing: str) -> Number:
    """Convert magnitude from one named unit to another.

    Raises ValueError when either unit name is unknown.
    """
    src = conversion_catalog.get(from_thing)
    dst = conversion_catalog.get(to_thing)
    if src is None or dst is None:
        raise ValueError("No known conversion")
    return _convert(magnitude, src, dst)
+
+
def _convert(magnitude: Number,
             from_unit: Converter,
             to_unit: Converter) -> Number:
    """Convert magnitude between two units by way of the canonical unit."""
    return to_unit.from_canonical(from_unit.to_canonical(magnitude))
+
+
def f_to_c(temp_f: float) -> float:
    """Convert degrees Fahrenheit to degrees Celsius."""
    celsius = convert(temp_f, "Fahrenheit", "Celsius")
    return celsius
+
+
def c_to_f(temp_c: float) -> float:
    """Convert degrees Celsius to degrees Fahrenheit."""
    fahrenheit = convert(temp_c, "Celsius", "Fahrenheit")
    return fahrenheit
--- /dev/null
+// antlr4 -Dlanguage=Python3 ./dateparse_utils.g4
+
+// Hi, self. In ANTLR grammars, there are two separate types of symbols: those
+// for the lexer and those for the parser. The former begin with a CAPITAL
+// whereas the latter begin with lowercase. The order of the lexer symbols
+// is the order that the lexer will recognize them in. There's a good tutorial
+// on this shit at:
+//
+// https://tomassetti.me/antlr-mega-tutorial/
+//
+// There are also a zillion premade grammars at:
+//
+// https://github.com/antlr/grammars-v4
+
grammar dateparse_utils;

// Entry point: exactly one date expression.
parse: dateExpr ;

// A date is either a single concrete date or a base date modified by
// a plus/minus offset (e.g. "3 days before christmas").
dateExpr
    : singleDateExpr
    | baseAndOffsetDateExpr
    ;

singleDateExpr
    : monthDayMaybeYearExpr
    | dayMonthMaybeYearExpr
    | yearMonthDayExpr
    | specialDateMaybeYearExpr
    | nthWeekdayInMonthMaybeYearExpr
    | firstLastWeekdayInMonthMaybeYearExpr
    ;

// e.g. "june 1" or "6/1/2021"
monthDayMaybeYearExpr
    : monthExpr DIV* dayOfMonth (DIV* year)?
    ;

// e.g. "1 june 2021"
dayMonthMaybeYearExpr
    : dayOfMonth DIV* monthName (DIV* year)?
    ;

// e.g. "2021/june/1"
yearMonthDayExpr
    : year DIV* monthName DIV* dayOfMonth
    ;

// e.g. "2nd friday of june"
nthWeekdayInMonthMaybeYearExpr
    : nth dayName ('in'|'of') monthName (DIV* year)?
    ;

// e.g. "last wednesday in october"
firstLastWeekdayInMonthMaybeYearExpr
    : firstOrLast dayName ('in'|'of'|DIV)? monthName (DIV* year)?
    ;

// e.g. "easter 2021"
specialDateMaybeYearExpr
    : specialDate (DIV* year)?
    ;

baseAndOffsetDateExpr
    : baseDate deltaPlusMinusExpr
    | deltaPlusMinusExpr baseDate
    ;

baseDate: singleDateExpr ;

// e.g. "3 days before", "2 weeks from"
deltaPlusMinusExpr: deltaInt deltaUnit deltaBeforeAfter? ;

deltaUnit: (WEEK|DAY|SUN|WEEKDAY) ;

deltaBeforeAfter: (BEFORE|AFTER) ;

monthExpr
    : monthName
    | monthNumber
    ;

year: DIGIT DIGIT DIGIT DIGIT ;

specialDate: SPECIAL_DATE ;

// Allows an optional ordinal suffix, e.g. "1st", "23rd".
dayOfMonth: DIGIT? DIGIT ('st'|'nd'|'rd'|'th')? ;

firstOrLast: (FIRST|LAST) ;

nth: DIGIT ('st'|'nd'|'rd'|'th')? ;

deltaInt: ('+'|'-')? DIGIT+ ;

dayName: WEEKDAY ;

monthName: MONTH ;

monthNumber: DIGIT? DIGIT ;
+
// ----------------------------------
// Lexer rules.  ANTLR prefers the longest match and breaks ties in
// declaration order, so MONTH/WEEKDAY are declared before the
// individual month/day tokens they are built from.

COMMENT: '#' ~[\r\n]* -> skip ;

SPACE: [ \t\r\n] -> skip ;

// Noise word: "the 2nd friday" == "2nd friday".
THE: 'the' -> skip ;

DIV: ('/'|','|'.') ;

MONTH: (JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC) ;

JAN : 'jan'
    | 'january'
    ;

FEB : 'feb'
    | 'february'
    ;

MAR : 'mar'
    | 'march'
    ;

APR : 'apr'
    | 'april'
    ;

MAY : 'may'
    ;

JUN : 'jun'
    | 'june'
    ;

JUL : 'jul'
    | 'july'
    ;

AUG : 'aug'
    | 'august'
    ;

SEP : 'sep'
    | 'sept'
    | 'september'
    ;

OCT : 'oct'
    | 'october'
    ;

NOV : 'nov'
    | 'november'
    ;

DEC : 'dec'
    | 'december'
    ;

WEEKDAY: (SUN|MON|TUE|WED|THU|FRI|SAT) ;

SUN : 'sun'
    | 'suns'
    | 'sunday'
    | 'sundays'
    ;

MON : 'mon'
    | 'mons'
    | 'monday'
    | 'mondays'
    ;

TUE : 'tue'
    | 'tues'
    | 'tuesday'
    | 'tuesdays'
    ;

WED : 'wed'
    | 'weds'
    | 'wednesday'
    | 'wednesdays'
    ;

THU : 'thu'
    | 'thur'
    | 'thurs'
    | 'thursday'
    | 'thursdays'
    ;

FRI : 'fri'
    | 'fris'
    | 'friday'
    | 'fridays'
    ;

SAT : 'sat'
    | 'sats'
    | 'saturday'
    | 'saturdays'
    ;

WEEK
    : 'week'
    | 'weeks'
    ;

DAY
    : 'day'
    | 'days'
    ;

// The short comments below (e.g. "easte") are the normalized keys
// produced by DateParser.normalize_special_day_name for each token.
SPECIAL_DATE
    : TODAY
    | NEW_YEARS_EVE
    | NEW_YEARS_DAY
    | MARTIN_LUTHER_KING_DAY
    | PRESIDENTS_DAY
    | EASTER
    | MEMORIAL_DAY
    | INDEPENDENCE_DAY
    | LABOR_DAY
    | COLUMBUS_DAY
    | VETERANS_DAY
    | THANKSGIVING_DAY
    | CHRISTMAS_EVE
    | CHRISTMAS
    ;

// today
TODAY
    : 'today'
    ;

// easte
EASTER
    : 'easter'
    | 'easter sunday'
    ;

// newye
NEW_YEARS_DAY
    : 'new years'
    | 'new years day'
    | 'new year\'s'
    | 'new year\'s day'
    ;

// newyeeve
NEW_YEARS_EVE
    : 'nye'
    | 'new years eve'
    | 'new year\'s eve'
    ;

// chris
CHRISTMAS
    : 'christmas'
    | 'christmas day'
    | 'xmas'
    | 'xmas day'
    ;

// chriseve
CHRISTMAS_EVE
    : 'christmas eve'
    | 'xmas eve'
    ;

// mlk
MARTIN_LUTHER_KING_DAY
    : 'martin luther king day'
    | 'mlk day'
    | 'mlk'
    ;

// memor
MEMORIAL_DAY
    : 'memorial'
    | 'memorial day'
    ;

// indep
INDEPENDENCE_DAY
    : 'independence day'
    ;

// labor
LABOR_DAY
    : 'labor'
    | 'labor day'
    ;

// presi
PRESIDENTS_DAY
    : 'presidents\' day'
    | 'president\'s day'
    | 'presidents day'
    | 'presidents'
    | 'president\'s'
    | 'presidents\''
    ;

// colum
COLUMBUS_DAY
    : 'columbus'
    | 'columbus day'
    | 'indiginous peoples day'
    | 'indiginous peoples\' day'
    ;

// veter
VETERANS_DAY
    : 'veterans'
    | 'veterans day'
    | 'veterans\' day'
    ;

// thank
THANKSGIVING_DAY
    : 'thanksgiving'
    | 'thanksgiving day'
    ;

FIRST: 'first' ;

LAST: 'last' ;

BEFORE: 'before' ;

AFTER: ('after'|'from') ;

DIGIT: ('0'..'9') ;
--- /dev/null
+#!/usr/bin/env python3
+
+import antlr4 # type: ignore
+import datetime
+import dateutil.easter
+import holidays # type: ignore
+import re
+import sys
+from typing import Any, Dict, Optional
+
+from dateparse.dateparse_utilsLexer import dateparse_utilsLexer # type: ignore
+from dateparse.dateparse_utilsListener import dateparse_utilsListener # type: ignore
+from dateparse.dateparse_utilsParser import dateparse_utilsParser # type: ignore
+
+
class ParseException(Exception):
    """Raised when a date expression cannot be parsed."""

    def __init__(self, message: str) -> None:
        # Explicitly initialize the base class so args/str()/repr() are
        # set deliberately rather than via BaseException.__new__ side
        # effects.
        super().__init__(message)
        self.message = message
+
+
class DateParser(dateparse_utilsListener):
    """ANTLR parse-tree listener that converts a date expression string
    (see dateparse_utils.g4) into a datetime.date.

    The tree walker fills self.context via the enter*/exit* callbacks
    below; exitDateExpr() then assembles self.date from that context.
    """

    # The two top-level expression shapes (dateExpr in the grammar).
    PARSE_TYPE_SINGLE_DATE_EXPR = 1
    PARSE_TYPE_BASE_AND_OFFSET_EXPR = 2
    # Delta-unit codes.  They start at 7 so they cannot collide with
    # the weekday numbers 0..6 that may also land in
    # context["delta_unit"] (see enterDeltaUnit / exitDateExpr).
    CONSTANT_DAYS = 7
    CONSTANT_WEEKS = 8
    CONSTANT_MONTHS = 9
    CONSTANT_YEARS = 10

    def __init__(self):
        # 3-letter month prefix -> 1-based month number.
        self.month_name_to_number = {
            "jan": 1,
            "feb": 2,
            "mar": 3,
            "apr": 4,
            "may": 5,
            "jun": 6,
            "jul": 7,
            "aug": 8,
            "sep": 9,
            "oct": 10,
            "nov": 11,
            "dec": 12,
        }
        # 3-letter day prefix -> datetime weekday() number (Monday == 0).
        self.day_name_to_number = {
            "mon": 0,
            "tue": 1,
            "wed": 2,
            "thu": 3,
            "fri": 4,
            "sat": 5,
            "sun": 6,
        }
        # 3-letter delta unit prefix -> CONSTANT_* code above.
        self.delta_unit_to_constant = {
            "day": DateParser.CONSTANT_DAYS,
            "wee": DateParser.CONSTANT_WEEKS,
        }
        # Result of the most recent parse; None until a parse succeeds.
        self.date: Optional[datetime.date] = None

    def parse_date_string(self, date_string: str) -> Optional[datetime.date]:
        """Parse date_string and return the date it describes (or None)."""
        input_stream = antlr4.InputStream(date_string)
        lexer = dateparse_utilsLexer(input_stream)
        stream = antlr4.CommonTokenStream(lexer)
        parser = dateparse_utilsParser(stream)
        tree = parser.parse()
        walker = antlr4.ParseTreeWalker()
        # Walking the tree triggers the listener callbacks below.
        walker.walk(self, tree)
        return self.get_date()

    def get_date(self) -> Optional[datetime.date]:
        """Return the date produced by the last parse_date_string call."""
        return self.date

    def enterDateExpr(self, ctx: dateparse_utilsParser.DateExprContext):
        # Reset all state at the start of every date expression.
        self.date = None
        self.context: Dict[str, Any] = {}
        if ctx.singleDateExpr() is not None:
            self.main_type = DateParser.PARSE_TYPE_SINGLE_DATE_EXPR
        elif ctx.baseAndOffsetDateExpr() is not None:
            self.main_type = DateParser.PARSE_TYPE_BASE_AND_OFFSET_EXPR

    @staticmethod
    def normalize_special_day_name(name: str) -> str:
        """Reduce a special date name to a short canonical key, e.g.
        "Christmas Eve" -> "chriseve", "New Year's Day" -> "newye"."""
        name = name.lower()
        name = name.replace("'", "")
        name = name.replace("xmas", "christmas")
        name = name.replace("mlk", "martin luther king")
        name = name.replace(" ", "")
        # Keep the first five characters plus a trailing "eve" marker.
        eve = "eve" if name[-3:] == "eve" else ""
        name = name[:5] + eve
        name = name.replace("washi", "presi")
        return name

    def parse_special(self, name: str) -> Optional[datetime.date]:
        """Resolve a special date name ("today", holidays, ...) to a date.

        NOTE(review): the name argument is ignored; the value is re-read
        from self.context["special"] just below -- confirm intent.
        """
        today = datetime.date.today()
        year = self.context.get("year", today.year)
        name = DateParser.normalize_special_day_name(self.context["special"])
        if name == "today":
            return today
        if name == "easte":
            return dateutil.easter.easter(year=year)
        # Try to match any (non-"Observed") US holiday by normalized key.
        for holiday_date, holiday_name in sorted(
            holidays.US(years=year).items()
        ):
            if "Observed" not in holiday_name:
                holiday_name = DateParser.normalize_special_day_name(
                    holiday_name
                )
                if name == holiday_name:
                    return holiday_date
        # Dates the holidays package does not cover.
        if name == "chriseve":
            return datetime.date(year=year, month=12, day=24)
        elif name == "newyeeve":
            return datetime.date(year=year, month=12, day=31)
        return None

    def parse_normal(self) -> datetime.date:
        """Assemble a date from the month/day/[year] in self.context."""
        if "month" not in self.context:
            raise ParseException("Missing month")
        if "day" not in self.context:
            raise ParseException("Missing day")
        if "year" not in self.context:
            # No year given: default to the current year.
            today = datetime.date.today()
            self.context["year"] = today.year
        return datetime.date(
            year=int(self.context["year"]),
            month=int(self.context["month"]),
            day=int(self.context["day"]),
        )

    def exitDateExpr(self, ctx: dateparse_utilsParser.DateExprContext) -> None:
        """When we leave the date expression, populate self.date."""
        if "special" in self.context:
            self.date = self.parse_special(self.context["special"])
        else:
            self.date = self.parse_normal()
        assert self.date is not None

        # For a single date, just return the date we pulled out.
        if self.main_type == DateParser.PARSE_TYPE_SINGLE_DATE_EXPR:
            return

        # Otherwise treat self.date as a base date that we're modifying
        # with an offset.
        if not "delta_int" in self.context:
            raise ParseException("Missing delta_int?!")
        count = self.context["delta_int"]
        if count == 0:
            return

        # Adjust count's sign based on the presence of 'before' or 'after'.
        if "delta_before_after" in self.context:
            before_after = self.context["delta_before_after"].lower()
            if before_after == "before":
                count = -count

        # What are we counting units of?
        if "delta_unit" not in self.context:
            raise ParseException("Missing delta_unit?!")
        unit = self.context["delta_unit"]
        if unit == DateParser.CONSTANT_DAYS:
            timedelta = datetime.timedelta(days=count)
            self.date = self.date + timedelta
        elif unit == DateParser.CONSTANT_WEEKS:
            timedelta = datetime.timedelta(weeks=count)
            self.date = self.date + timedelta
        else:
            # unit holds a weekday number (0..6): step one day at a
            # time in the sign's direction, counting matching weekdays.
            # NOTE(review): if unit were neither a CONSTANT_* nor a
            # valid weekday number this loop would never terminate --
            # confirm enterDeltaUnit guarantees otherwise.
            direction = 1 if count > 0 else -1
            count = abs(count)
            timedelta = datetime.timedelta(days=direction)

            while True:
                dow = self.date.weekday()
                if dow == unit:
                    count -= 1
                    if count == 0:
                        return
                self.date = self.date + timedelta

    def enterDeltaInt(self, ctx: dateparse_utilsParser.DeltaIntContext) -> None:
        # Stash the (signed) offset magnitude.
        try:
            i = int(ctx.getText())
        except:
            raise ParseException(f"Bad delta int: {ctx.getText()}")
        else:
            self.context["delta_int"] = i

    def enterDeltaUnit(
        self, ctx: dateparse_utilsParser.DeltaUnitContext
    ) -> None:
        # Stash the offset unit: a weekday number (0..6) or CONSTANT_*.
        try:
            txt = ctx.getText().lower()[:3]
            if txt in self.day_name_to_number:
                txt = self.day_name_to_number[txt]
            elif txt in self.delta_unit_to_constant:
                txt = self.delta_unit_to_constant[txt]
            else:
                raise ParseException(f"Bad delta unit: {ctx.getText()}")
        except:
            raise ParseException(f"Bad delta unit: {ctx.getText()}")
        else:
            self.context["delta_unit"] = txt

    def enterDeltaBeforeAfter(
        self, ctx: dateparse_utilsParser.DeltaBeforeAfterContext
    ) -> None:
        # Stash "before"/"after"/"from"; the sign is applied later in
        # exitDateExpr.
        try:
            txt = ctx.getText().lower()
        except:
            raise ParseException(f"Bad delta before|after: {ctx.getText()}")
        else:
            self.context["delta_before_after"] = txt

    def exitNthWeekdayInMonthMaybeYearExpr(
        self, ctx: dateparse_utilsParser.NthWeekdayInMonthMaybeYearExprContext
    ) -> None:
        """Do a bunch of work to convert expressions like...

        'the 2nd Friday of June' -and-
        'the last Wednesday in October'

        ...into base + offset expressions instead.
        """
        try:
            if "nth" not in self.context:
                raise ParseException(f"Missing nth number: {ctx.getText()}")
            n = self.context["nth"]
            if n < 1 or n > 5:  # months never have more than 5 Foodays
                if n != -1:
                    raise ParseException(f"Invalid nth number: {ctx.getText()}")
            del self.context["nth"]
            self.context["delta_int"] = n

            year = self.context.get("year", datetime.date.today().year)
            if "month" not in self.context:
                raise ParseException(
                    f"Missing month expression: {ctx.getText()}"
                )
            month = self.context["month"]

            dow = self.context["dow"]
            del self.context["dow"]
            self.context["delta_unit"] = dow

            # For the nth Fooday in Month, start at the 1st of the
            # month and count ahead N Foodays.  For the last Fooday in
            # Month, start at the last of the month and count back one
            # Fooday.
            if n == -1:
                month += 1
                if month == 13:
                    month = 1
                    year += 1
                tmp_date = datetime.date(year=year, month=month, day=1)
                tmp_date = tmp_date - datetime.timedelta(days=1)

                self.context["year"] = tmp_date.year
                self.context["month"] = tmp_date.month
                self.context["day"] = tmp_date.day

                # The delta adjustment code can handle the case where
                # the last day of the month is the day we're looking
                # for already.
            else:
                self.context["year"] = year
                self.context["month"] = month
                self.context["day"] = 1
            self.main_type = DateParser.PARSE_TYPE_BASE_AND_OFFSET_EXPR
        except:
            raise ParseException(
                f"Invalid nthWeekday expression: {ctx.getText()}"
            )

    def exitFirstLastWeekdayInMonthMaybeYearExpr(
        self,
        ctx: dateparse_utilsParser.FirstLastWeekdayInMonthMaybeYearExprContext,
    ) -> None:
        # "first Fooday" / "last Fooday" is just the nth-weekday case
        # with nth == 1 or nth == -1 (set by enterFirstOrLast below).
        self.exitNthWeekdayInMonthMaybeYearExpr(ctx)

    def enterNth(self, ctx: dateparse_utilsParser.NthContext) -> None:
        # Parse "1st", "2nd", "3"... into an int.
        try:
            i = ctx.getText()
            m = re.match("\d+[a-z][a-z]", i)
            if m is not None:
                # Strip the two-letter ordinal suffix ("st", "nd", ...).
                i = i[:-2]
            i = int(i)
        except:
            raise ParseException(f"Bad nth expression: {ctx.getText()}")
        else:
            self.context["nth"] = i

    def enterFirstOrLast(
        self, ctx: dateparse_utilsParser.FirstOrLastContext
    ) -> None:
        # "first" -> nth == 1; "last" -> nth == -1.
        try:
            txt = ctx.getText()
            if txt == "first":
                txt = 1
            elif txt == "last":
                txt = -1
            else:
                raise ParseException(
                    f"Bad first|last expression: {ctx.getText()}"
                )
        except:
            raise ParseException(f"Bad first|last expression: {ctx.getText()}")
        else:
            self.context["nth"] = txt

    def enterDayName(self, ctx: dateparse_utilsParser.DayNameContext) -> None:
        # Record the weekday number for a day name like "friday".
        try:
            dow = ctx.getText().lower()[:3]
            dow = self.day_name_to_number.get(dow, None)
        except:
            raise ParseException("Bad day of week")
        else:
            self.context["dow"] = dow

    def enterDayOfMonth(
        self, ctx: dateparse_utilsParser.DayOfMonthContext
    ) -> None:
        # NOTE(review): an ordinal suffix ("1st") would make int() raise
        # here -- confirm the grammar/lexer never delivers one.
        try:
            day = int(ctx.getText())
            if day < 1 or day > 31:
                raise ParseException(
                    f"Bad dayOfMonth expression: {ctx.getText()}"
                )
        except:
            raise ParseException(f"Bad dayOfMonth expression: {ctx.getText()}")
        self.context["day"] = day

    def enterMonthName(
        self, ctx: dateparse_utilsParser.MonthNameContext
    ) -> None:
        # Record the month number for a month name like "june".
        try:
            month = ctx.getText()
            month = month.lower()[:3]
            month = self.month_name_to_number.get(month, None)
            if month is None:
                raise ParseException(
                    f"Bad monthName expression: {ctx.getText()}"
                )
        except:
            raise ParseException(f"Bad monthName expression: {ctx.getText()}")
        else:
            self.context["month"] = month

    def enterMonthNumber(
        self, ctx: dateparse_utilsParser.MonthNumberContext
    ) -> None:
        # Record a numeric month, validating its range.
        try:
            month = int(ctx.getText())
            if month < 1 or month > 12:
                raise ParseException(
                    f"Bad monthNumber expression: {ctx.getText()}"
                )
        except:
            raise ParseException(f"Bad monthNumber expression: {ctx.getText()}")
        else:
            self.context["month"] = month

    def enterYear(self, ctx: dateparse_utilsParser.YearContext) -> None:
        # Record a (positive) four-digit year.
        try:
            year = int(ctx.getText())
            if year < 1:
                raise ParseException(f"Bad year expression: {ctx.getText()}")
        except:
            raise ParseException(f"Bad year expression: {ctx.getText()}")
        else:
            self.context["year"] = year

    def enterSpecialDate(
        self, ctx: dateparse_utilsParser.SpecialDateContext
    ) -> None:
        # Record the raw special-date text; resolved in parse_special().
        try:
            txt = ctx.getText().lower()
        except:
            raise ParseException(f"Bad specialDate expression: {ctx.getText()}")
        else:
            self.context["special"] = txt
+
+
def main() -> None:
    """Read date expressions from stdin, one per line, and print each parse."""
    parser = DateParser()
    for line in sys.stdin:
        # Normalize: lowercase, trim, and drop comments / blank lines.
        line = re.sub(r"#.*$", "", line.strip().lower())
        if re.match(r"^ *$", line) is not None:
            continue
        print(parser.parse_date_string(line))
    sys.exit(0)


if __name__ == "__main__":
    main()
--- /dev/null
+#!/usr/bin/env python3
+
+"""Utilities related to dates and times and datetimes."""
+
+import datetime
+import logging
+import re
+from typing import NewType
+
+import pytz
+
+import constants
+
+logger = logging.getLogger(__name__)
+
+
def now_pst() -> datetime.datetime:
    """Return the current time localized to the US/Pacific timezone."""
    pacific = pytz.timezone("US/Pacific")
    return datetime.datetime.now(tz=pacific)
+
+
def now() -> datetime.datetime:
    """Return the current local time as a naive datetime."""
    return datetime.datetime.now()
+
+
def datetime_to_string(
    dt: datetime.datetime,
    *,
    date_time_separator=" ",
    include_timezone=True,
    include_dayname=False,
    include_seconds=True,
    include_fractional=False,
    twelve_hour=True,
) -> str:
    """A nice way to convert a datetime into a string.

    Args:
        dt: the datetime to render
        date_time_separator: string between the date and time portions
        include_timezone: append %z (a no-op for naive datetimes)
        include_dayname: prefix with the abbreviated weekday name
        include_seconds: include :SS in the time portion
        include_fractional: include .microseconds in the time portion
        twelve_hour: 12-hour clock with AM/PM vs. 24-hour clock
    """
    fstring = ""
    if include_dayname:
        fstring += "%a/"
    # Bug fix: this used to assign (=) rather than append (+=), which
    # silently discarded the "%a/" dayname prefix added just above.
    fstring += f"%Y/%b/%d{date_time_separator}"
    if twelve_hour:
        fstring += "%I:%M"
        if include_seconds:
            fstring += ":%S"
        fstring += "%p"
    else:
        fstring += "%H:%M"
        if include_seconds:
            fstring += ":%S"
    if include_fractional:
        fstring += ".%f"
    if include_timezone:
        fstring += "%z"
    return dt.strftime(fstring).strip()
+
+
def timestamp() -> str:
    """Return a timestamp for now in Pacific timezone."""
    now_pacific = datetime.datetime.now(tz=pytz.timezone("US/Pacific"))
    return datetime_to_string(now_pacific, include_timezone=True)
+
+
def time_to_string(
    dt: datetime.datetime,
    *,
    include_seconds=True,
    include_fractional=False,
    include_timezone=False,
    twelve_hour=True,
) -> str:
    """A nice way to convert a datetime into a time (only) string."""
    if twelve_hour:
        # NOTE(review): %l (space-padded 12-hour) is a glibc extension,
        # not portable strftime -- confirm target platforms support it.
        fmt = "%l:%M"
        if include_seconds:
            fmt += ":%S"
        fmt += "%p"
    else:
        fmt = "%H:%M"
        if include_seconds:
            fmt += ":%S"
    if include_fractional:
        fmt += ".%f"
    if include_timezone:
        fmt += "%z"
    return dt.strftime(fmt).strip()
+
+
def seconds_to_timedelta(seconds: int) -> datetime.timedelta:
    """Convert a count of seconds into a datetime.timedelta."""
    delta = datetime.timedelta(seconds=seconds)
    return delta
+
+
# Distinct int type for "minutes since midnight" (hour * 60 + minute).
MinuteOfDay = NewType("MinuteOfDay", int)
+
+
def minute_number(hour: int, minute: int) -> MinuteOfDay:
    """Convert hour:minute into minute number from start of day."""
    minutes_since_midnight = hour * 60 + minute
    return MinuteOfDay(minutes_since_midnight)
+
+
def datetime_to_minute_number(dt: datetime.datetime) -> MinuteOfDay:
    """Convert a datetime into a minute number (of the day)."""
    # Only the time-of-day portion of dt matters here.
    return minute_number(dt.hour, dt.minute)
+
+
def minute_number_to_time_string(minute_num: MinuteOfDay) -> str:
    """Convert minute number from start of day into hour:minute am/pm string."""
    hour, minute = divmod(minute_num, 60)
    ampm = "a"
    if hour > 12:
        hour -= 12
        ampm = "p"
    if hour == 12:
        ampm = "p"
    if hour == 0:
        # Midnight hour renders as 12am.
        hour = 12
    # The hour is space padded to width 2 so columns line up.
    return f"{hour:2}:{minute:02}{ampm}"
+
+
def parse_duration(duration: str) -> int:
    """Parse a duration in string form (e.g. "1 day 2 hours") into seconds."""
    # Each pattern is searched independently; unmatched units add zero.
    patterns = {
        r'(\d+) *d[ays]*': 60 * 60 * 24,
        r'(\d+) *h[ours]*': 60 * 60,
        r'(\d+) *m[inutes]*': 60,
        r'(\d+) *s[econds]*': 1,
    }
    seconds = 0
    for pattern, multiplier in patterns.items():
        m = re.search(pattern, duration)
        if m is not None:
            seconds += int(m.group(1)) * multiplier
    return seconds
+
+
def describe_duration(age: int) -> str:
    """Describe a duration in seconds, e.g. "2 days, 1 hour, and 5 minutes"."""
    days, remainder = divmod(age, constants.SECONDS_PER_DAY)
    hours, remainder = divmod(remainder, constants.SECONDS_PER_HOUR)
    minutes, _ = divmod(remainder, constants.SECONDS_PER_MINUTE)

    descr = ""
    if days > 1:
        descr = f"{int(days)} days, "
    elif days == 1:
        descr = "1 day, "
    if hours > 1:
        descr = descr + f"{int(hours)} hours, "
    elif hours == 1:
        descr = descr + "1 hour, "
    if len(descr) > 0:
        descr = descr + "and "
    # Minutes are always mentioned, even when zero.
    if minutes == 1:
        descr = descr + "1 minute"
    else:
        descr = descr + f"{int(minutes)} minutes"
    return descr
+
+
def describe_duration_briefly(age: int) -> str:
    """Describe a duration in seconds briefly, e.g. "2d 3h 5m"."""
    days, remainder = divmod(age, constants.SECONDS_PER_DAY)
    hours, remainder = divmod(remainder, constants.SECONDS_PER_HOUR)
    minutes, _ = divmod(remainder, constants.SECONDS_PER_MINUTE)

    descr = ""
    if days > 0:
        descr = f"{int(days)}d "
    if hours > 0:
        descr = descr + f"{int(hours)}h "
    # Minutes always appear, even when zero.
    return descr + f"{int(minutes)}m"
--- /dev/null
+#!/usr/bin/env python3
+
+"""Decorators."""
+
+import datetime
+import enum
+import functools
+import logging
+import math
+import multiprocessing
+import random
+import signal
+import sys
+import threading
+import time
+import traceback
+from typing import Callable, Optional
+import warnings
+
+import thread_utils
+
+logger = logging.getLogger(__name__)
+
+
def timed(func: Callable) -> Callable:
    """Decorator that prints/logs the wall-clock runtime of each call."""

    @functools.wraps(func)
    def wrapper_timer(*args, **kwargs):
        before = time.perf_counter()
        result = func(*args, **kwargs)
        elapsed = time.perf_counter() - before
        msg = f"Finished {func.__name__!r} in {elapsed:.4f}s"
        print(msg)
        logger.info(msg)
        return result

    return wrapper_timer
+
+
def invocation_logged(func: Callable) -> Callable:
    """Decorator that prints/logs entry into and exit from the function."""

    @functools.wraps(func)
    def wrapper_invocation_logged(*args, **kwargs):
        def emit(event: str) -> None:
            # Same timestamp format as before: [YYYY/DD/Mon:HH:MM:SSTZ]
            ts = datetime.datetime.now().strftime("%Y/%d/%b:%H:%M:%S%Z")
            msg = f"[{ts}]: {event} {func.__name__}"
            print(msg)
            logger.info(msg)

        emit("Entered")
        ret = func(*args, **kwargs)
        emit("Exited")
        return ret

    return wrapper_invocation_logged
+
+
def debug_args(func: Callable) -> Callable:
    """Decorator that prints each call's signature and return value."""

    @functools.wraps(func)
    def wrapper_debug_args(*args, **kwargs):
        pieces = [f"{repr(a)}:{type(a)}" for a in args]
        pieces.extend(f"{k}={v!r}:{type(v)}" for k, v in kwargs.items())
        msg = f"Calling {func.__name__}({', '.join(pieces)})"
        print(msg)
        logger.info(msg)
        result = func(*args, **kwargs)
        logger.info(f"{func.__name__!r} returned {result!r}:{type(result)}")
        return result

    return wrapper_debug_args
+
+
def debug_count_calls(func: Callable) -> Callable:
    """Decorator that counts invocations and announces each call's ordinal."""

    @functools.wraps(func)
    def counting_wrapper(*args, **kwargs):
        counting_wrapper.num_calls += 1
        msg = f"Call #{counting_wrapper.num_calls} of {func.__name__!r}"
        print(msg)
        logger.info(msg)
        return func(*args, **kwargs)

    counting_wrapper.num_calls = 0  # per-function counter lives on the wrapper
    return counting_wrapper
+
+
class DelayWhen(enum.IntFlag):
    """When the @delay decorator should sleep, relative to the call.

    This is an IntFlag (not a plain Enum) because delay() tests
    membership with bitwise &; plain enum.Enum members do not implement
    `&`, so the original raised TypeError on every delayed call.
    BEFORE_AND_AFTER is the composition BEFORE_CALL | AFTER_CALL.
    """
    BEFORE_CALL = 1
    AFTER_CALL = 2
    BEFORE_AND_AFTER = 3
+
+
def delay(
    _func: Callable = None,
    *,
    seconds: float = 1.0,
    when: DelayWhen = DelayWhen.BEFORE_CALL,
) -> Callable:
    """Delay the execution of a function by sleeping before and/or after.

    Slow down a function by inserting a delay before and/or after its
    invocation.

    Args:
        _func: the wrapped function (when used as a bare decorator).
        seconds: how long to sleep at each delay point.
        when: sleep before the call, after it, or both.

    Note: membership tests are used below instead of the original
    `when & DelayWhen.BEFORE_CALL` bitwise tests, which raised
    TypeError because plain enum.Enum members do not implement `&`.
    The membership form works for both Enum and IntFlag DelayWhen.
    """

    def decorator_delay(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper_delay(*args, **kwargs):
            if when in (DelayWhen.BEFORE_CALL, DelayWhen.BEFORE_AND_AFTER):
                logger.debug(
                    f"@delay for {seconds}s BEFORE_CALL to {func.__name__}"
                )
                time.sleep(seconds)
            retval = func(*args, **kwargs)
            if when in (DelayWhen.AFTER_CALL, DelayWhen.BEFORE_AND_AFTER):
                logger.debug(
                    f"@delay for {seconds}s AFTER_CALL to {func.__name__}"
                )
                time.sleep(seconds)
            return retval
        return wrapper_delay

    if _func is None:
        return decorator_delay
    else:
        return decorator_delay(_func)
+
+
class _SingletonWrapper:
    """Wraps a decorated class and caches its first instance.

    One wrapper is created per class decorated with @singleton; the
    underlying class stays reachable via __wrapped__.
    """

    def __init__(self, cls):
        self.__wrapped__ = cls
        self._instance = None

    def __call__(self, *args, **kwargs):
        """Returns a single instance of decorated class"""
        logger.debug(
            f"@singleton returning global instance of {self.__wrapped__.__name__}"
        )
        instance = self._instance
        if instance is None:
            instance = self.__wrapped__(*args, **kwargs)
            self._instance = instance
        return instance
+
+
def singleton(cls):
    """Class decorator that makes a class produce one shared instance.

    Every call on the returned wrapper yields the same object.  Unit
    tests can reach the undecorated class through the wrapper's
    __wrapped__ attribute.
    """
    return _SingletonWrapper(cls)
+
+
def memoized(func: Callable) -> Callable:
    """Keep a cache of previous function call results.

    The cache is a dict keyed on the positional args plus the keyword
    items.  Keyword items are sorted so that f(a=1, b=2) and
    f(b=2, a=1) share one cache entry; the original keyed on keyword
    insertion order, so equivalent calls could miss the cache.  All
    arguments must be hashable.  Consider also functools.lru_cache for
    a more advanced implementation.
    """

    @functools.wraps(func)
    def wrapper_memoized(*args, **kwargs):
        # sorted() makes the key independent of keyword-argument order.
        cache_key = args + tuple(sorted(kwargs.items()))
        if cache_key not in wrapper_memoized.cache:
            value = func(*args, **kwargs)
            logger.debug(
                f"Memoizing {cache_key} => {value} for {func.__name__}"
            )
            wrapper_memoized.cache[cache_key] = value
        else:
            logger.debug(f"Returning memoized value for {func.__name__}")
        return wrapper_memoized.cache[cache_key]
    wrapper_memoized.cache = dict()
    return wrapper_memoized
+
+
def retry_predicate(
    tries: int,
    *,
    predicate: Callable[..., bool],
    delay_sec: float = 3,
    backoff: float = 2.0,
):
    """Retries a function or method up to a certain number of times
    with a prescribed initial delay period and backoff rate.

    Args:
        tries: maximum number of *retries*; the function is called once
            up front plus at most `tries` additional times.
        delay_sec: initial inter-attempt delay in seconds; must be > 0.
        backoff: multiplier (must be >= 1) applied to the delay after
            each failed attempt.
        predicate: called with the decorated function's retval; return
            True to stop (accept the value) or False to retry.

    Raises:
        ValueError: if backoff, tries, or delay_sec is out of range.
    """
    if backoff < 1:
        msg = f"backoff must be greater than or equal to 1, got {backoff}"
        logger.critical(msg)
        raise ValueError(msg)

    tries = math.floor(tries)
    if tries < 0:
        msg = f"tries must be 0 or greater, got {tries}"
        logger.critical(msg)
        raise ValueError(msg)

    if delay_sec <= 0:
        msg = f"delay_sec must be greater than 0, got {delay_sec}"
        logger.critical(msg)
        raise ValueError(msg)

    def deco_retry(f):
        @functools.wraps(f)
        def f_retry(*args, **kwargs):
            mtries, mdelay = tries, delay_sec  # make mutable
            retval = f(*args, **kwargs)
            while mtries > 0:
                if predicate(retval) is True:
                    return retval
                logger.debug("Predicate failed, sleeping and retrying.")
                mtries -= 1
                time.sleep(mdelay)
                mdelay *= backoff
                retval = f(*args, **kwargs)
            # Retries exhausted; hand back the last (failing) value.
            return retval
        return f_retry
    return deco_retry
+
+
def retry_if_false(tries: int, *, delay_sec=3.0, backoff=2.0):
    """Retry the decorated function until it returns exactly True."""
    def returned_true(result) -> bool:
        return result is True
    return retry_predicate(
        tries, predicate=returned_true, delay_sec=delay_sec, backoff=backoff
    )
+
+
def retry_if_none(tries: int, *, delay_sec=3.0, backoff=2.0):
    """Retry the decorated function until it returns a non-None value."""
    def returned_something(result) -> bool:
        return result is not None
    return retry_predicate(
        tries, predicate=returned_something, delay_sec=delay_sec, backoff=backoff
    )
+
+
def deprecated(func):
    """Decorator marking a function as deprecated.

    Every call logs a warning and emits a DeprecationWarning before
    delegating to the wrapped function.
    """

    @functools.wraps(func)
    def wrapper_deprecated(*args, **kwargs):
        warning = f"Call to deprecated function {func.__name__}"
        logger.warning(warning)
        warnings.warn(warning, category=DeprecationWarning)
        return func(*args, **kwargs)

    return wrapper_deprecated
+
+
def thunkify(func):
    """
    Make a function immediately return a function of no args which,
    when called, waits for the result, which will start being
    processed in another thread.

    If the worker raises, the exception is re-raised (with its original
    traceback) when the thunk is called.
    """

    @functools.wraps(func)
    def lazy_thunked(*args, **kwargs):
        wait_event = threading.Event()

        result = [None]
        exc = [False, None]

        def worker_func():
            try:
                func_result = func(*args, **kwargs)
                result[0] = func_result
            except Exception:
                exc[0] = True
                exc[1] = sys.exc_info()  # (type, value, traceback)
                msg = f"Thunkify has thrown an exception (will be raised on thunk()):\n{traceback.format_exc()}"
                logger.warning(msg)
                print(msg)
            finally:
                wait_event.set()

        def thunk():
            wait_event.wait()
            if exc[0]:
                # Re-raise the captured exception *instance* with its
                # traceback.  The old `raise exc[1][0](exc[1][1])` built
                # a brand-new exception whose args were the old instance,
                # mangling the message and dropping the traceback.
                raise exc[1][1].with_traceback(exc[1][2])
            return result[0]

        threading.Thread(target=worker_func).start()
        return thunk

    return lazy_thunked
+
+
+############################################################
+# Timeout
+############################################################
+
+# http://www.saltycrane.com/blog/2010/04/using-python-timeout-decorator-uploading-s3/
+# Used work of Stephen "Zero" Chappell <Noctis.Skytower@gmail.com>
+# in https://code.google.com/p/verse-quiz/source/browse/trunk/timeout.py
+
+
class TimeoutError(AssertionError):
    """Raised when a timed call exceeds its deadline.

    NOTE(review): subclasses AssertionError (and shadows the builtin
    TimeoutError) — presumably for historical caller compatibility;
    confirm before changing either.
    """

    def __init__(self, value: str = "Timed Out"):
        # Deliberately does not call super().__init__(); BaseException
        # has already captured the constructor args.
        self.value = value

    def __str__(self):
        # repr() so the message renders quoted, e.g. "'Timed Out'".
        return repr(self.value)
+
+
def _raise_exception(exception, error_message: Optional[str]):
    """Raise `exception`, constructed with `error_message` if one was given."""
    if error_message is None:
        raise exception()
    raise exception(error_message)
+
+
def _target(queue, function, *args, **kwargs):
    """Run a function with arguments and return output via a queue.

    This is a helper function for the Process created in _Timeout. It runs
    the function with positional arguments and keyword arguments and then
    returns the function's output by way of a queue. If an exception gets
    raised, it is returned to _Timeout to be raised by the value property.
    """
    try:
        queue.put((True, function(*args, **kwargs)))
    except BaseException:  # identical to the bare `except:` it replaces
        queue.put((False, sys.exc_info()[1]))
+
+
class _Timeout(object):
    """Wrap a function and add a timeout (limit) attribute to it.

    Instances of this class are automatically generated by the add_timeout
    function defined below.  The wrapped function runs in a child
    process; the parent polls for completion (0.1s granularity) and
    terminates the child if the deadline passes.
    """

    def __init__(
        self,
        function: Callable,
        timeout_exception: Exception,
        error_message: str,
        seconds: float,
    ):
        # Default deadline length in seconds; may be overridden
        # per-call via a "timeout" kwarg (see __call__).
        self.__limit = seconds
        self.__function = function
        self.__timeout_exception = timeout_exception
        self.__error_message = error_message
        # Mirror the wrapped function's metadata.
        self.__name__ = function.__name__
        self.__doc__ = function.__doc__
        # Absolute deadline timestamp; recomputed on each call.
        self.__timeout = time.time()
        # Placeholder process/queue; replaced on every call.
        self.__process = multiprocessing.Process()
        self.__queue: multiprocessing.queues.Queue = multiprocessing.Queue()

    def __call__(self, *args, **kwargs):
        """Execute the embedded function object asynchronously.

        The function given to the constructor is transparently called and
        requires that "ready" be intermittently polled. If and when it is
        True, the "value" property may then be checked for returned data.
        """
        # A per-call "timeout" kwarg overrides the default limit.
        self.__limit = kwargs.pop("timeout", self.__limit)
        self.__queue = multiprocessing.Queue(1)
        args = (self.__queue, self.__function) + args
        self.__process = multiprocessing.Process(
            target=_target, args=args, kwargs=kwargs
        )
        self.__process.daemon = True
        self.__process.start()
        if self.__limit is not None:
            self.__timeout = self.__limit + time.time()
        # Poll until the child finishes or `ready` cancels on deadline.
        while not self.ready:
            time.sleep(0.1)
        return self.value

    def cancel(self):
        """Terminate any possible execution of the embedded function."""
        if self.__process.is_alive():
            self.__process.terminate()
        # Always raises the configured timeout exception.
        _raise_exception(self.__timeout_exception, self.__error_message)

    @property
    def ready(self):
        """Read-only property indicating status of "value" property."""
        # Past the deadline?  cancel() kills the child and raises.
        if self.__limit and self.__timeout < time.time():
            self.cancel()
        return self.__queue.full() and not self.__queue.empty()

    @property
    def value(self):
        """Read-only property containing data returned from function."""
        if self.ready is True:
            # (flag, payload): flag True => payload is the return value,
            # flag False => payload is the exception to re-raise.
            flag, load = self.__queue.get()
            if flag:
                return load
            raise load
+
+
def timeout(
    seconds: float = 1.0,
    use_signals: Optional[bool] = None,
    timeout_exception=TimeoutError,
    error_message="Function call timed out",
):
    """Add a timeout parameter to a function and return the function.

    Note: the use_signals parameter is included in order to support
    multiprocessing scenarios (signal can only be used from the process'
    main thread). When not using signals, timeout granularity will be
    rounded to the nearest 0.1s.

    Raises an exception when the timeout is reached.

    It is illegal to pass anything other than a function as the first
    parameter. The function is wrapped and returned to the caller.
    """
    if use_signals is None:
        use_signals = thread_utils.is_current_thread_main_thread()

    def decorate(function):

        if use_signals:

            def handler(signum, frame):
                _raise_exception(timeout_exception, error_message)

            @functools.wraps(function)
            def new_function(*args, **kwargs):
                # A per-call "timeout" kwarg overrides the default.
                new_seconds = kwargs.pop("timeout", seconds)
                if new_seconds:
                    old = signal.signal(signal.SIGALRM, handler)
                    signal.setitimer(signal.ITIMER_REAL, new_seconds)
                try:
                    return function(*args, **kwargs)
                finally:
                    # Always disarm the timer and restore the previous
                    # handler if we armed one.  (The old code returned
                    # early when the decorator-level `seconds` was falsy,
                    # leaking a live SIGALRM armed by a per-call timeout
                    # and never restoring the handler.)
                    if new_seconds:
                        signal.setitimer(signal.ITIMER_REAL, 0)
                        signal.signal(signal.SIGALRM, old)

            return new_function
        else:

            @functools.wraps(function)
            def new_function(*args, **kwargs):
                # Run in a child process; _Timeout enforces the deadline.
                timeout_wrapper = _Timeout(
                    function, timeout_exception, error_message, seconds
                )
                return timeout_wrapper(*args, **kwargs)

            return new_function

    return decorate
+
+
class non_reentrant_code(object):
    """Decorator that suppresses reentrant calls to the wrapped function.

    While a call is in progress, a nested (reentrant) call on the same
    thread returns immediately without running the function.  Note the
    wrapper always returns None.
    """

    def __init__(self):
        # Bug fix: the original stored the threading.RLock *class*
        # (missing parens), so `with self._lock:` failed at call time.
        self._lock = threading.RLock()
        self._entered = False

    def __call__(self, f):
        def _gatekeeper(*args, **kwargs):
            with self._lock:
                if self._entered:
                    return
                self._entered = True
                try:
                    f(*args, **kwargs)
                finally:
                    # Reset even when f raises, so one failure doesn't
                    # permanently lock out all future calls.
                    self._entered = False

        return _gatekeeper
+
+
class rlocked(object):
    """Decorator that runs the wrapped function under an RLock and
    suppresses reentrant calls.

    Like non_reentrant_code: nested calls on the same thread are
    silently skipped, and the wrapper always returns None.
    """

    def __init__(self):
        # Bug fix: the original stored the threading.RLock *class*
        # (missing parens), so `with self._lock:` failed at call time.
        self._lock = threading.RLock()
        self._entered = False

    def __call__(self, f):
        def _gatekeeper(*args, **kwargs):
            with self._lock:
                if self._entered:
                    return
                self._entered = True
                try:
                    f(*args, **kwargs)
                finally:
                    # Reset even when f raises so future calls still run.
                    self._entered = False
        return _gatekeeper
+
+
def call_with_sample_rate(sample_rate: float) -> Callable:
    """Decorator factory: run the wrapped function on only a
    sample_rate fraction of calls; skipped calls return None.

    Raises:
        ValueError: if sample_rate is outside [0, 1].
    """
    if not 0.0 <= sample_rate <= 1.0:
        msg = f"sample_rate must be between [0, 1]. Got {sample_rate}."
        logger.critical(msg)
        raise ValueError(msg)

    def decorator(f):
        @functools.wraps(f)
        def _call_with_sample_rate(*args, **kwargs):
            roll = random.uniform(0, 1)
            if roll < sample_rate:
                return f(*args, **kwargs)
            logger.debug(
                f"@call_with_sample_rate skipping a call to {f.__name__}"
            )
        return _call_with_sample_rate
    return decorator
--- /dev/null
+#!/usr/bin/env python3
+
+from abc import ABC, abstractmethod
+from typing import Any, Generic, TypeVar
+
+T = TypeVar('T')
+
+
class DeferredOperand(ABC, Generic[T]):
    """A wrapper around an operand whose value is deferred until it is
    needed. See subclass SmartFuture for an example usage.

    Operators resolve both sides via DeferredOperand.resolve before
    delegating to the underlying values.

    Bug fixes vs. the original: or_() and xor() used `&` (copy-paste
    from and_()); they now use `|` and `^` respectively.
    """

    @abstractmethod
    def _resolve(self) -> T:
        """Produce (or wait for) the wrapped value."""
        pass

    @staticmethod
    def resolve(x: Any) -> Any:
        """Unwrap x through any chain of DeferredOperands."""
        while isinstance(x, DeferredOperand):
            x = x._resolve()
        return x

    # --- comparisons -------------------------------------------------

    def __lt__(self, other: Any) -> bool:
        return DeferredOperand.resolve(self) < DeferredOperand.resolve(other)

    def __le__(self, other: Any) -> bool:
        return DeferredOperand.resolve(self) <= DeferredOperand.resolve(other)

    def __eq__(self, other: Any) -> bool:
        return DeferredOperand.resolve(self) == DeferredOperand.resolve(other)

    def __ne__(self, other: Any) -> bool:
        return DeferredOperand.resolve(self) != DeferredOperand.resolve(other)

    def __gt__(self, other: Any) -> bool:
        return DeferredOperand.resolve(self) > DeferredOperand.resolve(other)

    def __ge__(self, other: Any) -> bool:
        return DeferredOperand.resolve(self) >= DeferredOperand.resolve(other)

    def __not__(self) -> bool:
        return not DeferredOperand.resolve(self)

    def bool(self) -> bool:
        return DeferredOperand.resolve(self)

    # --- arithmetic --------------------------------------------------

    def __add__(self, other: Any) -> T:
        return DeferredOperand.resolve(self) + DeferredOperand.resolve(other)

    def __iadd__(self, other: Any) -> T:
        return DeferredOperand.resolve(self) + DeferredOperand.resolve(other)

    def __radd__(self, other: Any) -> T:
        return DeferredOperand.resolve(self) + DeferredOperand.resolve(other)

    def __sub__(self, other: Any) -> T:
        return DeferredOperand.resolve(self) - DeferredOperand.resolve(other)

    def __mul__(self, other: Any) -> T:
        return DeferredOperand.resolve(self) * DeferredOperand.resolve(other)

    def __pow__(self, other: Any) -> T:
        return DeferredOperand.resolve(self) ** DeferredOperand.resolve(other)

    def __truediv__(self, other: Any) -> Any:
        return DeferredOperand.resolve(self) / DeferredOperand.resolve(other)

    def __floordiv__(self, other: Any) -> T:
        return DeferredOperand.resolve(self) // DeferredOperand.resolve(other)

    def __contains__(self, other):
        return DeferredOperand.resolve(other) in DeferredOperand.resolve(self)

    # --- bitwise -----------------------------------------------------

    def and_(self, other):
        return DeferredOperand.resolve(self) & DeferredOperand.resolve(other)

    def or_(self, other):
        # Fixed: was `&`.
        return DeferredOperand.resolve(self) | DeferredOperand.resolve(other)

    def xor(self, other):
        # Fixed: was `&`.
        return DeferredOperand.resolve(self) ^ DeferredOperand.resolve(other)

    def invert(self):
        return ~(DeferredOperand.resolve(self))

    # --- identity / misc --------------------------------------------

    def is_(self, other):
        return DeferredOperand.resolve(self) is DeferredOperand.resolve(other)

    def is_not(self, other):
        return DeferredOperand.resolve(self) is not DeferredOperand.resolve(other)

    def __abs__(self):
        return abs(DeferredOperand.resolve(self))

    def setitem(self, k, v):
        DeferredOperand.resolve(self)[DeferredOperand.resolve(k)] = v

    def delitem(self, k):
        del DeferredOperand.resolve(self)[DeferredOperand.resolve(k)]

    def getitem(self, k):
        return DeferredOperand.resolve(self)[DeferredOperand.resolve(k)]

    def lshift(self, other):
        return DeferredOperand.resolve(self) << DeferredOperand.resolve(other)

    def rshift(self, other):
        return DeferredOperand.resolve(self) >> DeferredOperand.resolve(other)

    def mod(self, other):
        return DeferredOperand.resolve(self) % DeferredOperand.resolve(other)

    def matmul(self, other):
        return DeferredOperand.resolve(self) @ DeferredOperand.resolve(other)

    def neg(self):
        return -(DeferredOperand.resolve(self))

    def pos(self):
        return +(DeferredOperand.resolve(self))

    def truth(self):
        return DeferredOperand.resolve(self)

    def __hash__(self):
        return DeferredOperand.resolve(self).__hash__()

    def __call__(self):
        return DeferredOperand.resolve(self)()

    def __iter__(self):
        return DeferredOperand.resolve(self).__iter__()

    def __repr__(self) -> str:
        return DeferredOperand.resolve(self).__repr__()

    def __bytes__(self) -> bytes:
        return DeferredOperand.resolve(self).__bytes__()

    def __int__(self) -> int:
        return int(DeferredOperand.resolve(self))

    def __float__(self) -> float:
        return float(DeferredOperand.resolve(self))

    def __getattr__(self, method_name):
        # Fallback: delegate unknown attribute access to the resolved
        # value, returning a callable that forwards its arguments.
        def method(*args, **kwargs):
            return getattr(DeferredOperand.resolve(self), method_name)(
                *args, **kwargs
            )
        return method
--- /dev/null
+#!/usr/bin/env python3
+
+from itertools import islice
+from typing import Any, Callable, Dict, Iterator
+
+
def init_or_inc(
    d: Dict[Any, Any],
    key: Any,
    *,
    init_value: Any = 1,
    inc_function: Callable[..., Any] = lambda x: x + 1
) -> bool:
    """Initialize d[key] to init_value if absent, else apply inc_function.

    Returns:
        True if the key already existed (and was incremented);
        False if it was newly initialized.
    """
    if key in d:  # idiom fix: `in d` instead of `in d.keys()`
        d[key] = inc_function(d[key])
        return True
    d[key] = init_value
    return False
+
+
def shard(d: Dict[Any, Any], size: int) -> Iterator[Dict[Any, Any]]:
    """Yield successive dicts containing at most `size` items of d.

    Iterates the items view exactly once.  The original re-sliced the
    view from position 0 for every shard (islice(items, x, x + size)),
    which was accidentally quadratic in len(d).
    """
    it = iter(d.items())
    for _ in range(0, len(d), size):
        yield dict(islice(it, size))
+
+
def item_with_max_value(d: Dict[Any, Any]) -> Any:
    """Return the (key, value) pair of d whose value is largest."""
    def value_of(item):
        return item[1]
    return max(d.items(), key=value_of)
+
+
def item_with_min_value(d: Dict[Any, Any]) -> Any:
    """Return the (key, value) pair of d whose value is smallest."""
    def value_of(item):
        return item[1]
    return min(d.items(), key=value_of)
+
+
def key_with_max_value(d: Dict[Any, Any]) -> Any:
    """Return the key associated with the largest value in d."""
    key, _ = item_with_max_value(d)
    return key
+
+
def key_with_min_value(d: Dict[Any, Any]) -> Any:
    """Return the key associated with the smallest value in d."""
    key, _ = item_with_min_value(d)
    return key
+
+
def max_value(d: Dict[Any, Any]) -> Any:
    """Return the largest value stored in d."""
    _, value = item_with_max_value(d)
    return value
+
+
def min_value(d: Dict[Any, Any]) -> Any:
    """Return the smallest value stored in d."""
    _, value = item_with_min_value(d)
    return value
+
+
def max_key(d: Dict[Any, Any]) -> Any:
    """Return the largest key in d (iterating a dict yields its keys)."""
    return max(d)
+
+
def min_key(d: Dict[Any, Any]) -> Any:
    """Return the smallest key in d (iterating a dict yields its keys)."""
    return min(d)
+
+
def merge(a: Dict[Any, Any], b: Dict[Any, Any], path=None) -> Dict[Any, Any]:
    """Recursively merge dict b into dict a (mutating a) and return a.

    Nested dicts are merged recursively; keys holding equal leaf values
    are left alone.

    Args:
        path: internal accumulator naming the current key path for
            error messages; callers should not pass it.

    Raises:
        ValueError: if a and b hold different non-dict values for the
            same key path.  (Was a bare Exception; ValueError is more
            specific and still caught by existing `except Exception`
            handlers.)
    """
    if path is None:
        path = []
    for key in b:
        if key not in a:
            a[key] = b[key]
        elif isinstance(a[key], dict) and isinstance(b[key], dict):
            merge(a[key], b[key], path + [str(key)])
        elif a[key] != b[key]:
            raise ValueError("Conflict at %s" % ".".join(path + [str(key)]))
    return a
--- /dev/null
+#!/usr/bin/env python3
+
+import shlex
+import subprocess
+from typing import List
+
+
def cmd_with_timeout(command: str, timeout_seconds: float) -> int:
    """Run command under /bin/bash with a deadline; return its exit status.

    Raises subprocess.CalledProcessError on non-zero exit and
    subprocess.TimeoutExpired if the deadline passes.
    """
    bash_invocation = ["/bin/bash", "-c", command]
    return subprocess.check_call(bash_invocation, timeout=timeout_seconds)
+
+
def cmd(command: str) -> str:
    """Run a command with everything encased in a string and return
    the output text as a string. Raises subprocess.CalledProcessError.
    """
    completed = subprocess.run(
        command, shell=True, capture_output=True, check=True
    )
    return completed.stdout.decode("utf-8")
+
+
def run_silently(command: str) -> None:
    """Run a command, discarding its output; raise
    subprocess.CalledProcessError if it exits non-zero."""
    devnull = subprocess.DEVNULL
    subprocess.run(
        command, shell=True, stderr=devnull, stdout=devnull,
        capture_output=False, check=True
    )
+
+
def cmd_in_background(command: str, *, silent: bool = False) -> subprocess.Popen:
    """Start command in the background; return its Popen handle.

    stdin is always disconnected; with silent=True, stdout and stderr
    are discarded as well.
    """
    argv = shlex.split(command)
    if not silent:
        return subprocess.Popen(argv, stdin=subprocess.DEVNULL)
    return subprocess.Popen(
        argv,
        stdin=subprocess.DEVNULL,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
+
+
def cmd_list(command: List[str]) -> str:
    """Run a command with args encapsulated in a list and return the
    output text as a string. Raises subprocess.CalledProcessError.
    """
    proc = subprocess.run(command, capture_output=True, check=True)
    return proc.stdout.decode("utf-8")
--- /dev/null
+#!/usr/bin/env python3
+
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+import concurrent.futures as fut
+from collections import defaultdict
+from dataclasses import dataclass
+import logging
+import numpy
+import os
+import platform
+import random
+import subprocess
+import threading
+import time
+from typing import Any, Callable, Dict, List, Optional, Set
+
+import cloudpickle # type: ignore
+
+from ansi import bg, fg, underline, reset
+import argparse_utils
+import config
+import exec_utils
+from decorator_utils import singleton
+import histogram
+import string_utils
+
+logger = logging.getLogger(__name__)
+
# Register executor-related command-line flags with the project's
# shared config module.
parser = config.add_commandline_args(
    f"Executors ({__file__})",
    "Args related to processing executors."
)
parser.add_argument(
    '--executors_threadpool_size',
    type=int,
    metavar='#THREADS',
    help='Number of threads in the default threadpool, leave unset for default',
    default=None
)
parser.add_argument(
    '--executors_processpool_size',
    type=int,
    metavar='#PROCESSES',
    help='Number of processes in the default processpool, leave unset for default',
    default=None,
)
parser.add_argument(
    '--executors_schedule_remote_backups',
    default=True,
    action=argparse_utils.ActionNoYes,
    help='Should we schedule duplicative backup work if a remote bundle is slow',
)

# Canned command prefixes for shipping bundles to remote machines:
# quiet rsync that skips existing same-size files, and ssh with X11
# forwarding disabled.  (Presumably used by RemoteExecutor below —
# not referenced in this excerpt.)
rsync = 'rsync -q --no-motd -W --ignore-existing --timeout=60 --size-only -z'
ssh = 'ssh -oForwardX11=no'


# Histogram of bundle execution durations (seconds, as recorded by
# run_local_bundle below): 25 evenly spaced buckets over 0..500.
hist = histogram.SimpleHistogram(
    histogram.SimpleHistogram.n_evenly_spaced_buckets(
        int(0), int(500), 25
    )
)
+
+
def run_local_bundle(fun, *args, **kwargs):
    """Run fun locally, timing it and recording the duration in the
    module-level histogram; return fun's result."""
    logger.debug(f"Running local bundle at {fun.__name__}")
    begin = time.time()
    result = fun(*args, **kwargs)
    elapsed = time.time() - begin
    logger.debug(f"{fun.__name__} finished; used {elapsed:.1f}s")
    hist.add_item(elapsed)
    return result
+
+
def run_cloud_pickle(pickle):
    """Unpickle a (fun, args, kwargs) bundle made by make_cloud_pickle,
    run it (with timing logs), and return the result."""
    fun, args, kwargs = cloudpickle.loads(pickle)
    logger.debug(f"Running pickled bundle at {fun.__name__}")
    begin = time.time()
    result = fun(*args, **kwargs)
    elapsed = time.time() - begin
    logger.debug(f"{fun.__name__} finished; used {elapsed:.1f}s")
    return result
+
+
def make_cloud_pickle(fun, *args, **kwargs):
    """Serialize (fun, args, kwargs) with cloudpickle for later execution."""
    logger.info(f"Making cloudpickled bundle at {fun.__name__}")
    return cloudpickle.dumps((fun, args, kwargs))
+
+
class BaseExecutor(ABC):
    """Abstract interface all executors share: submit work, shut down."""

    def __init__(self):
        pass

    @abstractmethod
    def submit(self,
               function: Callable,
               *args,
               **kwargs) -> fut.Future:
        """Schedule function(*args, **kwargs); return a Future for it."""
        pass

    @abstractmethod
    def shutdown(self,
                 wait: bool = True) -> None:
        """Stop accepting work and release resources."""
        pass
+
+
class ThreadExecutor(BaseExecutor):
    """Executor backed by concurrent.futures.ThreadPoolExecutor.

    Pool size comes from max_workers if given, else the
    --executors_threadpool_size flag, else the library default.
    """

    def __init__(self,
                 max_workers: Optional[int] = None):
        super().__init__()
        workers = None
        if max_workers is not None:
            workers = max_workers
        elif 'executors_threadpool_size' in config.config:
            workers = config.config['executors_threadpool_size']
        logger.debug(f'Creating threadpool executor with {workers} workers')
        self._thread_pool_executor = fut.ThreadPoolExecutor(
            max_workers=workers,
            thread_name_prefix="thread_executor_helper"
        )
        # Number of submissions seen (never decremented).
        self.job_count = 0

    def submit(self,
               function: Callable,
               *args,
               **kwargs) -> fut.Future:
        """Schedule function on the pool, wrapped so its runtime is
        timed and recorded in the module histogram."""
        self.job_count += 1
        logger.debug(
            f'Submitted work to threadpool; there are now {self.job_count} items.'
        )
        # Direct unpacking replaces the old manually-built newargs list.
        return self._thread_pool_executor.submit(
            run_local_bundle,
            function,
            *args,
            **kwargs)

    def shutdown(self,
                 wait: bool = True) -> None:
        """Shut the pool down, dumping the timing histogram first.

        (Annotated `wait: bool` for consistency with BaseExecutor's
        declared signature; the original left it unannotated.)
        """
        logger.debug("Shutting down threadpool executor.")
        print(hist)
        self._thread_pool_executor.shutdown(wait)
+
+
class ProcessExecutor(BaseExecutor):
    """Executor backed by concurrent.futures.ProcessPoolExecutor.

    Work is cloudpickled before submission to sidestep the standard
    pickler's limitations.
    """

    def __init__(self,
                 max_workers=None):
        super().__init__()
        pool_size = None
        if max_workers is not None:
            pool_size = max_workers
        elif 'executors_processpool_size' in config.config:
            pool_size = config.config['executors_processpool_size']
        logger.debug(f'Creating processpool executor with {pool_size} workers.')
        self._process_executor = fut.ProcessPoolExecutor(
            max_workers=pool_size,
        )
        self.job_count = 0

    def submit(self,
               function: Callable,
               *args,
               **kwargs) -> fut.Future:
        """Cloudpickle the work, then schedule it on the process pool."""
        # Bundle it up before submitting because pickle sucks.
        pickle = make_cloud_pickle(function, *args, **kwargs)
        self.job_count += 1
        logger.debug(
            f'Submitting work to processpool executor; there are now {self.job_count} items.'
        )
        return self._process_executor.submit(run_cloud_pickle, pickle)

    def shutdown(self, wait=True) -> None:
        """Shut the pool down, dumping the timing histogram first."""
        logger.debug('Shutting down processpool executor')
        print(hist)
        self._process_executor.shutdown(wait)
+
+
@dataclass
class RemoteWorkerRecord:
    """One remote account/machine that can accept bundles of work."""

    username: str  # login name on the remote machine
    machine: str   # remote hostname
    weight: int    # scheduling weight (relative selection preference)
    count: int     # remaining concurrent-task capacity

    def __hash__(self):
        # Identity is the (user, host) pair; weight/count are mutable.
        return hash((self.username, self.machine))

    def __repr__(self):
        return f'{self.username}@{self.machine}'
+
+
@dataclass
class BundleDetails:
    """Bookkeeping for one unit of work handled by the remote executor."""

    pickled_code: bytes                    # cloudpickled (fun, args, kwargs)
    uuid: str                              # unique identifier for this bundle
    worker: Optional[RemoteWorkerRecord]   # worker assigned, once acquired
    username: Optional[str]
    machine: Optional[str]
    hostname: str
    code_file: str                         # path holding the pickled code
    result_file: str                       # path where the result lands
    pid: int
    start_ts: float                        # start timestamp (epoch seconds)
    end_ts: float                          # end timestamp (epoch seconds)
    too_slow: bool                         # running beyond global p95 (see RemoteExecutorStatus.__repr__)
    super_slow: bool                       # running beyond 1.5x global p95
    src_bundle: BundleDetails              # presumably the original bundle when this is a backup — confirm
    is_cancelled: threading.Event
    was_cancelled: bool
    backup_bundles: Optional[List[BundleDetails]]
+
+
class RemoteExecutorStatus:
    """Thread-safe bookkeeping for the remote executor: which bundles are
    in flight on which workers, per-worker timing history, and a
    human-readable periodic status dump."""

    def __init__(self, total_worker_count: int) -> None:
        # Total worker slots across all machines.
        self.worker_count = total_worker_count
        # Workers we have ever handed work to.
        self.known_workers: Set[RemoteWorkerRecord] = set()
        # Bundle uuid -> start/end timestamps.
        self.start_per_bundle: Dict[str, float] = defaultdict(float)
        self.end_per_bundle: Dict[str, float] = defaultdict(float)
        # Completed-bundle latencies, bucketed per worker.
        self.finished_bundle_timings_per_worker: Dict[
            RemoteWorkerRecord,
            List[float]
        ] = {}
        # Worker -> set of bundle uuids currently running there.
        self.in_flight_bundles_by_worker: Dict[
            RemoteWorkerRecord,
            Set[str]
        ] = {}
        self.bundle_details_by_uuid: Dict[str, BundleDetails] = {}
        # Latencies of all completed (non-cancelled) bundles.
        self.finished_bundle_timings: List[float] = []
        self.last_periodic_dump: Optional[float] = None
        self.total_bundles_submitted = 0

        # Protects reads and modification using self. Also used
        # as a memory fence for modifications to bundle.
        self.lock = threading.Lock()

    def record_acquire_worker(
        self,
        worker: RemoteWorkerRecord,
        uuid: str
    ) -> None:
        """Lock-taking wrapper for record_acquire_worker_already_locked."""
        with self.lock:
            self.record_acquire_worker_already_locked(
                worker,
                uuid
            )

    def record_acquire_worker_already_locked(
        self,
        worker: RemoteWorkerRecord,
        uuid: str
    ) -> None:
        """Note that `worker` started bundle `uuid`.  Caller holds the lock."""
        assert self.lock.locked()
        self.known_workers.add(worker)
        self.start_per_bundle[uuid] = time.time()
        x = self.in_flight_bundles_by_worker.get(worker, set())
        x.add(uuid)
        self.in_flight_bundles_by_worker[worker] = x

    def record_bundle_details(
        self,
        details: BundleDetails) -> None:
        """Lock-taking wrapper for record_bundle_details_already_locked."""
        with self.lock:
            self.record_bundle_details_already_locked(details)

    def record_bundle_details_already_locked(
        self,
        details: BundleDetails) -> None:
        """Index a bundle's details by uuid.  Caller holds the lock."""
        assert self.lock.locked()
        self.bundle_details_by_uuid[details.uuid] = details

    def record_release_worker_already_locked(
        self,
        worker: RemoteWorkerRecord,
        uuid: str,
        was_cancelled: bool,
    ) -> None:
        """Note that bundle `uuid` finished on `worker`.  Caller holds the
        lock.  Cancelled bundles don't contribute latency statistics."""
        assert self.lock.locked()
        ts = time.time()
        self.end_per_bundle[uuid] = ts
        self.in_flight_bundles_by_worker[worker].remove(uuid)
        if not was_cancelled:
            bundle_latency = ts - self.start_per_bundle[uuid]
            x = self.finished_bundle_timings_per_worker.get(worker, list())
            x.append(bundle_latency)
            self.finished_bundle_timings_per_worker[worker] = x
            self.finished_bundle_timings.append(bundle_latency)

    def total_in_flight(self) -> int:
        """Count of bundles currently running.  Caller holds the lock."""
        assert self.lock.locked()
        total_in_flight = 0
        for worker in self.known_workers:
            total_in_flight += len(self.in_flight_bundles_by_worker[worker])
        return total_in_flight

    def total_idle(self) -> int:
        """Count of free worker slots.  Caller holds the lock."""
        assert self.lock.locked()
        return self.worker_count - self.total_in_flight()

    def __repr__(self):
        # Renders a status dashboard.  NOTE: as a side effect, bundles
        # running slower than the global p95 (or 1.5x p95) are flagged
        # via details.too_slow / details.super_slow so backups can be
        # scheduled.  Caller holds the lock.
        assert self.lock.locked()
        ts = time.time()
        total_finished = len(self.finished_bundle_timings)
        total_in_flight = self.total_in_flight()
        ret = f'\n\n{underline()}Remote Executor Pool Status{reset()}: '
        qall = None
        if len(self.finished_bundle_timings) > 1:
            # Global p50/p95 latency across all workers.
            qall = numpy.quantile(self.finished_bundle_timings, [0.5, 0.95])
            ret += (
                f'⏱=∀p50:{qall[0]:.1f}s, ∀p95:{qall[1]:.1f}s, '
                f'✅={total_finished}/{self.total_bundles_submitted}, '
                f'💻n={total_in_flight}/{self.worker_count}\n'
            )
        else:
            ret += (
                f' ✅={total_finished}/{self.total_bundles_submitted}, '
                f'💻n={total_in_flight}/{self.worker_count}\n'
            )

        for worker in self.known_workers:
            ret += f' {fg("lightning yellow")}{worker.machine}{reset()}: '
            timings = self.finished_bundle_timings_per_worker.get(worker, [])
            count = len(timings)
            qworker = None
            if count > 1:
                # Per-worker p50/p95 latency.
                qworker = numpy.quantile(timings, [0.5, 0.95])
                ret += f' 💻p50: {qworker[0]:.1f}s, 💻p95: {qworker[1]:.1f}s\n'
            else:
                ret += '\n'
            if count > 0:
                ret += f' ...finished {count} total bundle(s) so far\n'
            in_flight = len(self.in_flight_bundles_by_worker[worker])
            if in_flight > 0:
                ret += f' ...{in_flight} bundles currently in flight:\n'
                for bundle_uuid in self.in_flight_bundles_by_worker[worker]:
                    details = self.bundle_details_by_uuid.get(
                        bundle_uuid,
                        None
                    )
                    pid = str(details.pid) if details is not None else "TBD"
                    sec = ts - self.start_per_bundle[bundle_uuid]
                    ret += f' (pid={pid}): {bundle_uuid} for {sec:.1f}s so far '
                    if qworker is not None:
                        if sec > qworker[1]:
                            ret += f'{bg("red")}>💻p95{reset()} '
                        elif sec > qworker[0]:
                            ret += f'{fg("red")}>💻p50{reset()} '
                    if qall is not None:
                        if sec > qall[1] * 1.5:
                            # Far beyond global p95: flag for another backup.
                            ret += f'{bg("red")}!!!{reset()}'
                            if details is not None:
                                logger.debug(f'Flagging {details.uuid} for another backup')
                                details.super_slow = True
                        elif sec > qall[1]:
                            # Beyond global p95: flag for a backup.
                            ret += f'{bg("red")}>∀p95{reset()} '
                            if details is not None:
                                logger.debug(f'Flagging {details.uuid} for a backup')
                                details.too_slow = True
                        elif sec > qall[0]:
                            ret += f'{fg("red")}>∀p50{reset()}'
                    ret += '\n'
        return ret

    def periodic_dump(self, total_bundles_submitted: int) -> None:
        """Print the dashboard at most once every 5 seconds.
        Caller holds the lock."""
        assert self.lock.locked()
        self.total_bundles_submitted = total_bundles_submitted
        ts = time.time()
        if (
            self.last_periodic_dump is None
            or ts - self.last_periodic_dump > 5.0
        ):
            print(self)
            self.last_periodic_dump = ts
+
+
class RemoteWorkerSelectionPolicy(ABC):
    """Strategy interface for deciding which remote worker gets work."""

    def register_worker_pool(self, workers):
        """Remember the worker pool; seed the RNG some policies use."""
        random.seed()
        self.workers = workers

    @abstractmethod
    def is_worker_available(self) -> bool:
        """Return True if any registered worker has capacity left."""
        pass

    @abstractmethod
    def acquire_worker(
        self,
        machine_to_avoid = None
    ) -> Optional[RemoteWorkerRecord]:
        """Reserve and return a worker (ideally not machine_to_avoid);
        None when everyone is at capacity."""
        pass
+
+
class WeightedRandomRemoteWorkerSelectionPolicy(RemoteWorkerSelectionPolicy):
    """Selects a worker at random, biased by each worker's weight and
    remaining capacity (count)."""

    def is_worker_available(self) -> bool:
        """True if any worker still has capacity."""
        for worker in self.workers:
            if worker.count > 0:
                return True
        return False

    def acquire_worker(
        self,
        machine_to_avoid: Optional[str] = None
    ) -> Optional[RemoteWorkerRecord]:
        """Reserve one unit of capacity on a randomly chosen worker.

        Each worker appears count * weight times in a grab bag, so
        heavier and less-busy workers are chosen more often.  We draw up
        to five times trying to avoid machine_to_avoid; after the third
        attempt we accept it anyway.  Returns None if no capacity.
        """
        grabbag = []
        for worker in self.workers:
            for x in range(0, worker.count):
                for y in range(0, worker.weight):
                    grabbag.append(worker)

        # Bug fix: an empty grab bag (no worker with remaining
        # capacity) previously raised IndexError on grabbag[0] below.
        if not grabbag:
            logger.warning("Couldn't find a worker; go fish.")
            return None

        for attempt in range(0, 5):
            random.shuffle(grabbag)
            worker = grabbag[0]
            if worker.machine != machine_to_avoid or attempt > 2:
                if worker.count > 0:
                    worker.count -= 1
                    logger.debug(f'Selected worker {worker}')
                    return worker
        logger.warning("Couldn't find a worker; go fish.")
        return None
+
+
class RoundRobinRemoteWorkerSelectionPolicy(RemoteWorkerSelectionPolicy):
    """Hands out worker capacity by cycling through the pool in order.

    NOTE(review): machine_to_avoid is accepted for interface
    compatibility but is not honored by this policy.
    """

    def __init__(self) -> None:
        self.index = 0  # position of the next worker to consider

    def is_worker_available(self) -> bool:
        """True if any worker still has capacity."""
        for worker in self.workers:
            if worker.count > 0:
                return True
        return False

    def acquire_worker(
        self,
        machine_to_avoid: Optional[str] = None
    ) -> Optional[RemoteWorkerRecord]:
        """Reserve one unit of capacity on the next worker (round robin)
        that has any; return None after one full fruitless lap.

        Fixed the PEP 484 implicit-Optional annotation
        (machine_to_avoid: str = None).
        """
        x = self.index
        while True:
            worker = self.workers[x]
            if worker.count > 0:
                worker.count -= 1
                # Advance the cursor past the worker we just used.
                x = (x + 1) % len(self.workers)
                self.index = x
                logger.debug(f'Selected worker {worker}')
                return worker
            x = (x + 1) % len(self.workers)
            if x == self.index:
                logger.warning("Couldn't find a worker; go fish.")
                return None
+
+
class RemoteExecutor(BaseExecutor):
    """Executes work bundles on a pool of remote machines over ssh.

    Work is cloudpickled into a file, rsync'ed to a chosen worker, run
    there by remote_worker.py, and the pickled result is copied back.
    Bundles that look slow (relative to p50/p95 of finished bundles) may
    be speculatively re-run as "backup" bundles on other workers;
    whichever copy finishes first cancels the other.
    """

    def __init__(self,
                 workers: List[RemoteWorkerRecord],
                 policy: RemoteWorkerSelectionPolicy) -> None:
        """Raises if the worker pool has no capacity at all."""
        super().__init__()
        self.workers = workers
        self.worker_count = 0
        for worker in self.workers:
            self.worker_count += worker.count
        if self.worker_count <= 0:
            msg = f"We need somewhere to schedule work; count was {self.worker_count}"
            logger.critical(msg)
            raise Exception(msg)
        self.policy = policy
        self.policy.register_worker_pool(self.workers)
        # Condition variable guarding worker capacity; signaled by
        # release_worker, waited on in find_available_worker_or_block.
        self.cv = threading.Condition()
        # One helper thread per unit of remote capacity; each launch()
        # call occupies one helper thread for the bundle's lifetime.
        self._helper_executor = fut.ThreadPoolExecutor(
            thread_name_prefix="remote_executor_helper",
            max_workers=self.worker_count,
        )
        self.status = RemoteExecutorStatus(self.worker_count)
        self.total_bundles_submitted = 0
        logger.debug(
            f'Creating remote processpool with {self.worker_count} remote endpoints.'
        )

    def is_worker_available(self) -> bool:
        """True if the policy says some worker has spare capacity."""
        return self.policy.is_worker_available()

    def acquire_worker(
        self,
        machine_to_avoid: Optional[str] = None
    ) -> Optional[RemoteWorkerRecord]:
        """Reserve a worker via the policy; None if none available."""
        return self.policy.acquire_worker(machine_to_avoid)

    def find_available_worker_or_block(
        self,
        machine_to_avoid: Optional[str] = None
    ) -> RemoteWorkerRecord:
        """Reserve a worker, blocking on self.cv until one frees up."""
        with self.cv:
            while not self.is_worker_available():
                self.cv.wait()
            worker = self.acquire_worker(machine_to_avoid)
            if worker is not None:
                return worker
        # is_worker_available() was True under the lock, so acquire
        # should have succeeded.
        msg = "We should never reach this point in the code"
        logger.critical(msg)
        raise Exception(msg)

    def release_worker(self, worker: RemoteWorkerRecord) -> None:
        """Return one unit of capacity to worker and wake one waiter."""
        logger.debug(f'Released worker {worker}')
        with self.cv:
            worker.count += 1
            self.cv.notify()

    def heartbeat(self) -> None:
        """Called periodically (from launch's wait loop): dump status
        and consider scheduling backups for slow in-flight bundles."""
        with self.status.lock:
            # Regular progress report
            self.status.periodic_dump(self.total_bundles_submitted)

            # Look for bundles to reschedule
            if len(self.status.finished_bundle_timings) > 7:
                for worker, bundle_uuids in self.status.in_flight_bundles_by_worker.items():
                    for uuid in bundle_uuids:
                        bundle = self.status.bundle_details_by_uuid.get(uuid, None)
                        if (
                            bundle is not None and
                            bundle.too_slow and
                            bundle.src_bundle is None and
                            config.config['executors_schedule_remote_backups']
                        ):
                            self.consider_backup_for_bundle(bundle)

    def consider_backup_for_bundle(self, bundle: BundleDetails) -> None:
        """Maybe schedule a speculative backup run of bundle.

        too_slow bundles get at most one backup; super_slow bundles get
        up to two, but only when plenty of workers are idle.  Caller
        must hold self.status.lock.
        """
        assert self.status.lock.locked()
        if (
            bundle.too_slow
            and len(bundle.backup_bundles) == 0  # one backup per
        ):
            msg = f"*** Rescheduling {bundle.pid}/{bundle.uuid} ***"
            logger.debug(msg)
            self.schedule_backup_for_bundle(bundle)
            return
        elif (
            bundle.super_slow
            and len(bundle.backup_bundles) < 2  # two backups in dire situations
            and self.status.total_idle() > 4
        ):
            msg = f"*** Rescheduling {bundle.pid}/{bundle.uuid} ***"
            logger.debug(msg)
            self.schedule_backup_for_bundle(bundle)
            return

    def check_if_cancelled(self, bundle: BundleDetails) -> bool:
        """Poll bundle's cancellation event; record and return True if
        it has been cancelled by its counterpart."""
        with self.status.lock:
            if bundle.is_cancelled.wait(timeout=0.0):
                logger.debug(f'Bundle {bundle.uuid} is cancelled, bail out.')
                bundle.was_cancelled = True
                return True
        return False

    def launch(self, bundle: BundleDetails) -> Any:
        """Run bundle on some remote worker and return its result.

        Runs on a helper-executor thread; blocks until a worker is
        free, copies the code over, runs it remotely, and polls for
        completion (heartbeat-ing while it waits).
        """
        # Find a worker for bundle or block until one is available.
        uuid = bundle.uuid
        hostname = bundle.hostname
        avoid_machine = None
        # A backup bundle avoids the machine its original is running on.
        if bundle.src_bundle is not None:
            avoid_machine = bundle.src_bundle.machine
        worker = None
        while worker is None:
            worker = self.find_available_worker_or_block(avoid_machine)
        bundle.worker = worker
        machine = bundle.machine = worker.machine
        username = bundle.username = worker.username
        self.status.record_acquire_worker(worker, uuid)
        logger.debug(f'Running bundle {uuid} on {worker}...')

        # Before we do work, make sure it's still viable.
        if self.check_if_cancelled(bundle):
            return self.post_launch_work(bundle)

        # Send input to machine if it's not local.
        if hostname not in machine:
            cmd = f'{rsync} {bundle.code_file} {username}@{machine}:{bundle.code_file}'
            logger.debug(f"Copying work to {worker} via {cmd}")
            exec_utils.run_silently(cmd)

        # Before we do more work, make sure it's still viable.
        if self.check_if_cancelled(bundle):
            return self.post_launch_work(bundle)

        # Fucking Apple has a python3 binary in /usr/sbin that is not
        # the one we want and is protected by the OS so make sure that
        # /usr/local/bin is early in the path.
        cmd = (f'{ssh} {bundle.username}@{bundle.machine} '
               f'"export PATH=/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/home/scott/bin:/home/scott/.local/bin; /home/scott/lib/python_modules/remote_worker.py'
               f' --code_file {bundle.code_file} --result_file {bundle.result_file}"')
        p = exec_utils.cmd_in_background(cmd, silent=True)
        bundle.pid = pid = p.pid
        logger.debug(f"Running {cmd} in the background as process {pid}")

        while True:
            try:
                p.wait(timeout=0.5)
            except subprocess.TimeoutExpired:
                self.heartbeat()

                # Both source and backup bundles can be cancelled by
                # the other depending on which finishes first.
                if self.check_if_cancelled(bundle):
                    p.terminate()
                    break
            else:
                logger.debug(
                    f"{pid}/{bundle.uuid} has finished its work normally."
                )
                break
        return self.post_launch_work(bundle)

    def post_launch_work(self, bundle: BundleDetails) -> Any:
        """Finish up after the remote process ends (or is cancelled):
        fetch results, update status, cancel the counterpart bundle,
        release the worker, and (for originals) unpickle the result."""
        with self.status.lock:
            is_original = bundle.src_bundle is None
            was_cancelled = bundle.was_cancelled
            username = bundle.username
            machine = bundle.machine
            result_file = bundle.result_file
            code_file = bundle.code_file

            # Whether original or backup, if we finished first we must
            # fetch the results if the computation happened on a
            # remote machine.
            if not was_cancelled:
                assert bundle.machine is not None
                if bundle.hostname not in bundle.machine:
                    cmd = f'{rsync} {username}@{machine}:{result_file} {result_file} 2>/dev/null'
                    logger.debug(
                        f"Fetching results from {username}@{machine} via {cmd}"
                    )
                    try:
                        exec_utils.run_silently(cmd)
                    except subprocess.CalledProcessError:
                        # Best effort; the backup/original may have
                        # already moved the file.
                        pass
                    exec_utils.run_silently(f'{ssh} {username}@{machine}'
                                            f' "/bin/rm -f {code_file} {result_file}"')
            bundle.end_ts = time.time()
            assert bundle.worker is not None
            self.status.record_release_worker_already_locked(
                bundle.worker,
                bundle.uuid,
                was_cancelled
            )
            if not was_cancelled:
                dur = bundle.end_ts - bundle.start_ts
                hist.add_item(dur)

        # Only the original worker should unpickle the file contents
        # though since it's the only one whose result matters.
        if is_original:
            logger.debug(f"Unpickling {result_file}.")
            with open(f'{result_file}', 'rb') as rb:
                serialized = rb.read()
            result = cloudpickle.loads(serialized)
            os.remove(f'{result_file}')
            os.remove(f'{code_file}')

            # Notify any backups that the original is done so they
            # should stop ASAP.  Do this whether or not we
            # finished first since there could be more than one
            # backup.
            if bundle.backup_bundles is not None:
                for backup in bundle.backup_bundles:
                    logger.debug(
                        f'Notifying backup {backup.uuid} that it is cancelled'
                    )
                    backup.is_cancelled.set()

        # This is a backup.
        else:
            # Backup results don't matter, they just need to leave the
            # result file in the right place for their original to
            # read later.
            result = None

            # Tell the original to stop if we finished first.
            if not was_cancelled:
                logger.debug(
                    f'Notifying original {bundle.src_bundle.uuid} that it is cancelled'
                )
                bundle.src_bundle.is_cancelled.set()

        assert bundle.worker is not None
        self.release_worker(bundle.worker)
        return result

    def create_original_bundle(self, pickle):
        """Wrap pickled code in a new BundleDetails, writing the code
        to a uniquely-named file under /tmp."""
        uuid = string_utils.generate_uuid(as_hex=True)
        code_file = f'/tmp/{uuid}.code.bin'
        result_file = f'/tmp/{uuid}.result.bin'

        logger.debug(f'Writing pickled code to {code_file}')
        with open(f'{code_file}', 'wb') as wb:
            wb.write(pickle)

        bundle = BundleDetails(
            pickled_code = pickle,
            uuid = uuid,
            worker = None,
            username = None,
            machine = None,
            hostname = platform.node(),
            code_file = code_file,
            result_file = result_file,
            pid = 0,
            start_ts = time.time(),
            end_ts = 0.0,
            too_slow = False,
            super_slow = False,
            src_bundle = None,
            is_cancelled = threading.Event(),
            was_cancelled = False,
            backup_bundles = [],
        )
        self.status.record_bundle_details(bundle)
        logger.debug(f'Created original bundle {uuid}')
        return bundle

    def create_backup_bundle(self, src_bundle: BundleDetails):
        """Clone src_bundle as a backup that shares its code/result
        files; caller must hold self.status.lock (see the
        *_already_locked call below)."""
        assert src_bundle.backup_bundles is not None
        n = len(src_bundle.backup_bundles)
        uuid = src_bundle.uuid + f'_backup#{n}'

        backup_bundle = BundleDetails(
            pickled_code = src_bundle.pickled_code,
            uuid = uuid,
            worker = None,
            username = None,
            machine = None,
            hostname = src_bundle.hostname,
            code_file = src_bundle.code_file,
            result_file = src_bundle.result_file,
            pid = 0,
            start_ts = time.time(),
            end_ts = 0.0,
            too_slow = False,
            super_slow = False,
            src_bundle = src_bundle,
            is_cancelled = threading.Event(),
            was_cancelled = False,
            backup_bundles = None,    # backup backups not allowed
        )
        src_bundle.backup_bundles.append(backup_bundle)
        self.status.record_bundle_details_already_locked(backup_bundle)
        logger.debug(f'Created backup bundle {uuid}')
        return backup_bundle

    def schedule_backup_for_bundle(self,
                                   src_bundle: BundleDetails):
        """Create a backup of src_bundle and submit it for execution.
        Caller must hold self.status.lock."""
        assert self.status.lock.locked()
        backup_bundle = self.create_backup_bundle(src_bundle)
        logger.debug(
            f'Scheduling backup bundle {backup_bundle.uuid} for execution'
        )
        self._helper_executor.submit(self.launch, backup_bundle)

        # Results from backups don't matter; if they finish first
        # they will move the result_file to this machine and let
        # the original pick them up and unpickle them.

    def submit(self,
               function: Callable,
               *args,
               **kwargs) -> fut.Future:
        """Pickle function(*args, **kwargs) into a bundle and schedule
        it on the remote pool; returns a Future for the result."""
        pickle = make_cloud_pickle(function, *args, **kwargs)
        bundle = self.create_original_bundle(pickle)
        self.total_bundles_submitted += 1
        logger.debug(
            f'Submitted work to remote executor; {self.total_bundles_submitted} items now submitted'
        )
        return self._helper_executor.submit(self.launch, bundle)

    def shutdown(self, wait=True) -> None:
        """Shut down the helper pool and print the bundle-duration
        histogram."""
        self._helper_executor.shutdown(wait)
        print(hist)
+
+
@singleton
class DefaultExecutors(object):
    """Lazily-constructed, process-wide default executor instances."""

    def __init__(self):
        self.thread_executor: Optional[ThreadExecutor] = None
        self.process_executor: Optional[ProcessExecutor] = None
        self.remote_executor: Optional[RemoteExecutor] = None

    def ping(self, host) -> bool:
        """Return True iff host answers one ICMP echo request."""
        result = subprocess.call(
            ['ping', '-c', '1', host],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        return result == 0

    def thread_pool(self) -> ThreadExecutor:
        """Return (creating on first use) the default thread executor."""
        if self.thread_executor is None:
            self.thread_executor = ThreadExecutor()
        return self.thread_executor

    def process_pool(self) -> ProcessExecutor:
        """Return (creating on first use) the default process executor."""
        if self.process_executor is None:
            self.process_executor = ProcessExecutor()
        return self.process_executor

    def remote_pool(self) -> RemoteExecutor:
        """Return (creating on first use) the default remote executor,
        pinging each candidate machine and enrolling only responders."""
        if self.remote_executor is None:
            # (machine, weight, count) for each candidate worker host.
            candidates = (
                ('cheetah.house', 10, 6),
                ('video.house', 2, 4),
                ('wannabe.house', 2, 4),
                ('meerkat.cabin', 7, 2),
                ('backup.house', 3, 2),
                ('puma.cabin', 10, 6),
            )
            pool: List[RemoteWorkerRecord] = []
            for machine, weight, count in candidates:
                if self.ping(machine):
                    pool.append(
                        RemoteWorkerRecord(
                            username = 'scott',
                            machine = machine,
                            weight = weight,
                            count = count,
                        ),
                    )
            policy = WeightedRandomRemoteWorkerSelectionPolicy()
            policy.register_worker_pool(pool)
            self.remote_executor = RemoteExecutor(pool, policy)
        return self.remote_executor
--- /dev/null
+#!/usr/bin/env python3
+
+"""Utilities for working with files."""
+
+import datetime
+import errno
+import logging
+import os
+import time
+from typing import Optional
+import glob
+from os.path import isfile, join, exists
+
+import datetime_utils
+
+
+logger = logging.getLogger(__name__)
+
+
def create_path_if_not_exist(path, on_error=None):
    """
    Attempts to create path if it does not exist. If on_error is
    specified, it is called with an exception if one occurs, otherwise
    exception is rethrown.

    >>> import uuid
    >>> import os
    >>> path = os.path.join("/tmp", str(uuid.uuid4()), str(uuid.uuid4()))
    >>> os.path.exists(path)
    False
    >>> create_path_if_not_exist(path)
    >>> os.path.exists(path)
    True
    """
    logger.debug(f"Creating path {path}")
    saved_umask = os.umask(0)  # so the 0o777 chmod below sticks
    try:
        os.makedirs(path)
        os.chmod(path, 0o777)
    except OSError as ex:
        already_there = ex.errno == errno.EEXIST or os.path.isdir(path)
        if not already_there:
            if on_error is None:
                raise
            on_error(path, ex)
    finally:
        os.umask(saved_umask)
+
+
def does_file_exist(filename: str) -> bool:
    """True iff filename names an existing filesystem path."""
    return os.path.exists(filename)
+
+
def get_file_raw_timestamps(filename: str) -> Optional[os.stat_result]:
    """Return os.stat() results for filename, or None (logged) on error."""
    try:
        stats = os.stat(filename)
    except Exception as e:
        logger.exception(e)
        return None
    return stats
+
+
def get_file_raw_timestamp(filename: str, extractor) -> Optional[float]:
    """Apply extractor to filename's stat() results; None if stat failed."""
    tss = get_file_raw_timestamps(filename)
    return None if tss is None else extractor(tss)
+
+
def get_file_raw_atime(filename: str) -> Optional[float]:
    """Raw access time of filename, or None."""
    return get_file_raw_timestamp(filename, lambda stats: stats.st_atime)
+
+
def get_file_raw_mtime(filename: str) -> Optional[float]:
    """Raw modification time of filename, or None."""
    return get_file_raw_timestamp(filename, lambda stats: stats.st_mtime)
+
+
def get_file_raw_ctime(filename: str) -> Optional[float]:
    """Raw metadata-change time of filename, or None."""
    return get_file_raw_timestamp(filename, lambda stats: stats.st_ctime)
+
+
def convert_file_timestamp_to_datetime(
    filename: str, producer
) -> Optional[datetime.datetime]:
    """Turn the timestamp producer(filename) into a datetime, or None."""
    ts = producer(filename)
    if ts is None:
        return None
    return datetime.datetime.fromtimestamp(ts)
+
+
def get_file_atime_as_datetime(filename: str) -> Optional[datetime.datetime]:
    """Access time of filename as a datetime, or None."""
    return convert_file_timestamp_to_datetime(filename, get_file_raw_atime)
+
+
def get_file_mtime_as_datetime(filename: str) -> Optional[datetime.datetime]:
    """Modification time of filename as a datetime, or None."""
    return convert_file_timestamp_to_datetime(filename, get_file_raw_mtime)
+
+
def get_file_ctime_as_datetime(filename: str) -> Optional[datetime.datetime]:
    """Metadata-change time of filename as a datetime, or None."""
    return convert_file_timestamp_to_datetime(filename, get_file_raw_ctime)
+
+
def get_file_timestamp_age_seconds(filename: str, extractor) -> Optional[int]:
    """Seconds elapsed since the extracted timestamp of filename, or None."""
    now = time.time()
    stats = get_file_raw_timestamps(filename)
    if stats is None:
        return None
    return now - extractor(stats)
+
+
def get_file_atime_age_seconds(filename: str) -> Optional[int]:
    """Seconds since filename was last accessed, or None."""
    return get_file_timestamp_age_seconds(filename, lambda stats: stats.st_atime)
+
+
def get_file_ctime_age_seconds(filename: str) -> Optional[int]:
    """Seconds since filename's metadata last changed, or None."""
    return get_file_timestamp_age_seconds(filename, lambda stats: stats.st_ctime)
+
+
def get_file_mtime_age_seconds(filename: str) -> Optional[int]:
    """Seconds since filename was last modified, or None."""
    return get_file_timestamp_age_seconds(filename, lambda stats: stats.st_mtime)
+
+
def get_file_timestamp_timedelta(
    filename: str, extractor
) -> Optional[datetime.timedelta]:
    """Age of filename's extracted timestamp as a timedelta, or None."""
    age = get_file_timestamp_age_seconds(filename, extractor)
    if age is None:
        return None
    return datetime.timedelta(seconds=float(age))
+
+
def get_file_atime_timedelta(filename: str) -> Optional[datetime.timedelta]:
    """Time since filename was last accessed, or None."""
    return get_file_timestamp_timedelta(filename, lambda stats: stats.st_atime)
+
+
def get_file_ctime_timedelta(filename: str) -> Optional[datetime.timedelta]:
    """Time since filename's metadata last changed, or None."""
    return get_file_timestamp_timedelta(filename, lambda stats: stats.st_ctime)
+
+
def get_file_mtime_timedelta(filename: str) -> Optional[datetime.timedelta]:
    """Time since filename was last modified, or None."""
    return get_file_timestamp_timedelta(filename, lambda stats: stats.st_mtime)
+
+
def describe_file_timestamp(
    filename: str, extractor, *, brief=False
) -> Optional[str]:
    """Human-readable description of a file timestamp's age, or None."""
    age = get_file_timestamp_age_seconds(filename, extractor)
    if age is None:
        return None
    describe = (
        datetime_utils.describe_duration_briefly
        if brief
        else datetime_utils.describe_duration
    )
    return describe(age)
+
+
def describe_file_atime(filename: str, *, brief=False) -> Optional[str]:
    """Describe how long ago filename was last accessed."""
    return describe_file_timestamp(filename, lambda stats: stats.st_atime, brief=brief)
+
+
def describe_file_ctime(filename: str, *, brief=False) -> Optional[str]:
    """Describe how long ago filename's metadata changed."""
    return describe_file_timestamp(filename, lambda stats: stats.st_ctime, brief=brief)
+
+
def describe_file_mtime(filename: str, *, brief=False) -> Optional[str]:
    """Describe how long ago filename was last modified."""
    return describe_file_timestamp(filename, lambda stats: stats.st_mtime, brief=brief)
+
+
def expand_globs(in_filename: str):
    """Yield every path matching the glob pattern in_filename."""
    yield from glob.glob(in_filename)
+
+
def get_files(directory: str):
    """Yield full paths of the regular files directly inside directory."""
    for entry in os.listdir(directory):
        path = join(directory, entry)
        if isfile(path) and exists(path):
            yield path
+
+
def get_directories(directory: str):
    """Yield full paths of the subdirectories directly inside directory."""
    for entry in os.listdir(directory):
        path = join(directory, entry)
        if not isfile(path) and exists(path):
            yield path
+
+
def get_files_recursive(directory: str):
    """Yield files in directory and, recursively, its subdirectories."""
    yield from get_files(directory)
    for subdir in get_directories(directory):
        yield from get_files_recursive(subdir)
--- /dev/null
+#!/usr/bin/env python3
+
+import logging
+from typing import NamedTuple
+
+import requests
+import speech_recognition as sr # type: ignore
+
+import config
+
+logger = logging.getLogger(__name__)
+
# Command-line flags controlling how we reach the Google Assistant
# bridge service and which Google account the requests run as.
parser = config.add_commandline_args(
    f"Google Assistant ({__file__})",
    "Args related to contacting the Google Assistant",
)
parser.add_argument(
    "--google_assistant_bridge",
    type=str,
    default="http://kiosk.house:3000",
    metavar="URL",
    help="How to contact the Google Assistant bridge"
)
parser.add_argument(
    "--google_assistant_username",
    type=str,
    metavar="GOOGLE_ACCOUNT",
    default="scott.gasch",
    help="The user account for talking to Google Assistant"
)
+
+
class GoogleResponse(NamedTuple):
    """The outcome of one request to the Google Assistant bridge."""

    success: bool              # did the bridge report success?
    response: str              # textual response (or error message)
    audio_url: str             # where to fetch the response audio
    audio_transcription: str   # speech-to-text of the audio response

    def __repr__(self):
        return f"""
success: {self.success}
response: {self.response}
audio_transcription: {self.audio_transcription}
audio_url: {self.audio_url}"""
+
+
def tell_google(cmd: str, *, recognize_speech=True) -> GoogleResponse:
    """Alias of ask_google: issue a command to the Google Assistant."""
    return ask_google(cmd, recognize_speech=recognize_speech)
+
+
def ask_google(cmd: str, *, recognize_speech=True) -> GoogleResponse:
    """POST cmd to the Google Assistant bridge and return its response.

    When recognize_speech is True, also fetches the response audio from
    the bridge and runs it through Google speech recognition to fill in
    audio_transcription.  On a non-200 POST, logs an error and returns
    a GoogleResponse with success=False and empty fields.

    NOTE(review): `r` is re-bound from the POST response to the audio
    GET response inside the success branch; the else-branch log line
    only ever sees the POST response.
    """
    payload = {
        "command": cmd,
        "user": config.config['google_assistant_username'],
    }
    url = f"{config.config['google_assistant_bridge']}/assistant"
    r = requests.post(url, json=payload)
    success = False
    response = ""
    audio = ""
    audio_transcription = ""
    if r.status_code == 200:
        j = r.json()
        success = bool(j["success"])
        # On failure the bridge reports the reason in "error".
        response = j["response"] if success else j["error"]
        audio = f"{config.config['google_assistant_bridge']}{j['audio']}"
        if recognize_speech:
            recognizer = sr.Recognizer()
            r = requests.get(audio)
            if r.status_code == 200:
                raw = r.content
                # Audio is interpreted as 24kHz, 16-bit samples (see
                # sample_rate/sample_width below).
                speech = sr.AudioData(
                    frame_data=raw,
                    sample_rate=24000,
                    sample_width=2,
                )
                audio_transcription = recognizer.recognize_google(
                    speech,
                )
    else:
        logger.error(
            f'HTTP request to {url} with {payload} failed; code {r.status_code}'
        )
    return GoogleResponse(
        success=success,
        response=response,
        audio_url=audio,
        audio_transcription=audio_transcription,
    )
--- /dev/null
+#!/usr/bin/env python3
+
+import math
+from numbers import Number
+from typing import Generic, Iterable, List, Optional, Tuple, TypeVar
+
+from math_utils import RunningMedian
+from text_utils import bar_graph
+
+
+T = TypeVar("T", bound=Number)
+
+
class SimpleHistogram(Generic[T]):
    """A bucketed histogram tracking count, sum, min, max, and a
    running median of the items added to it."""

    # Useful in defining wide open bottom/top bucket bounds:
    POSITIVE_INFINITY = math.inf
    NEGATIVE_INFINITY = -math.inf

    def __init__(self, buckets: List[Tuple[T, T]]):
        """buckets is a list of [start, end) bounds.  Raises if any
        two buckets overlap."""
        self.buckets = {}
        for start_end in buckets:
            if self._get_bucket(start_end[0]) is not None:
                raise Exception("Buckets overlap?!")
            self.buckets[start_end] = 0
        self.sigma = 0           # running sum of accepted items
        self.median = RunningMedian()
        self.maximum = None
        self.minimum = None
        self.count = 0           # number of accepted items

    @staticmethod
    def n_evenly_spaced_buckets(
        min_bound: T,
        max_bound: T,
        n: int,
    ) -> List[Tuple[T, T]]:
        """Return n equal-width [start, end) buckets spanning
        [min_bound, max_bound).  Raises if the computed stride is not
        positive.  NOTE(review): uses range(), so the bounds are
        assumed to be integers — confirm before float use."""
        ret = []
        stride = int((max_bound - min_bound) / n)
        if stride <= 0:
            raise Exception("Min must be < Max")
        for bucket_start in range(min_bound, max_bound, stride):
            ret.append((bucket_start, bucket_start + stride))
        return ret

    def _get_bucket(self, item: T) -> Optional[Tuple[T, T]]:
        """Return the [start, end) bucket containing item, or None."""
        for start_end in self.buckets:
            if start_end[0] <= item < start_end[1]:
                return start_end
        return None

    def add_item(self, item: T) -> bool:
        """Add item to the histogram; returns False (recording
        nothing) if item fits no bucket."""
        bucket = self._get_bucket(item)
        if bucket is None:
            return False
        self.count += 1
        self.buckets[bucket] += 1
        self.sigma += item
        self.median.add_number(item)
        if self.maximum is None or item > self.maximum:
            self.maximum = item
        if self.minimum is None or item < self.minimum:
            self.minimum = item
        return True

    def add_items(self, lst: Iterable[T]) -> bool:
        """Add every item in lst; True iff all of them fit a bucket.

        Bug fix: the old expression `all_true and self.add_item(item)`
        short-circuited after the first rejected item, silently
        skipping the rest of the list.  Calling add_item first ensures
        every item is processed.
        """
        all_true = True
        for item in lst:
            all_true = self.add_item(item) and all_true
        return all_true

    def __repr__(self) -> str:
        """Render an ASCII bar graph of the buckets (up to the last
        non-empty one) followed by summary statistics; empty string if
        nothing has been added."""
        max_population: Optional[int] = None
        for bucket in self.buckets:
            pop = self.buckets[bucket]
            if pop > 0:
                last_bucket_start = bucket[0]  # beyond this all are empty
                if max_population is None or pop > max_population:
                    max_population = pop
        txt = ""
        if max_population is None:
            return txt

        for bucket in sorted(self.buckets, key=lambda x: x[0]):
            pop = self.buckets[bucket]
            start = bucket[0]
            end = bucket[1]
            bar = bar_graph(
                (pop / max_population),
                include_text = False,
                width = 70,
                left_end = "",
                right_end = "")
            label = f'{start}..{end}'
            txt += f'{label:12}: ' + bar + f"({pop}) ({len(bar)})\n"
            if start == last_bucket_start:
                break

        txt = txt + f'''{self.count} item(s)
{self.maximum} max
{self.minimum} min
{self.sigma/self.count:.3f} mean
{self.median.get_median()} median'''
        return txt
--- /dev/null
+#!/usr/bin/env python3
+
+import itertools
+import logging
+
logger = logging.getLogger(__name__)
# Maps sequence name -> the itertools.count generator for that sequence.
generators = {}
+
+
def get(name: str) -> int:
    """Return the next integer in the monotonic sequence named `name`.

    Each distinct name gets its own counter starting at 0, e.g.:

        def __init__(self):
            self.my_unique_id = id_generator.get("student_id")
    """
    generator = generators.setdefault(name, itertools.count())
    x = next(generator)
    logger.debug(f"Generated next id {x}")
    return x
--- /dev/null
+#!/usr/bin/env python3
+
+"""Utilities related to user input."""
+
import signal
import sys
from typing import List, Optional

import readchar  # type: ignore
+
+
def single_keystroke_response(
    valid_responses: List[str],
    *,
    prompt: Optional[str] = None,
    default_response: Optional[str] = None,
    timeout_seconds: Optional[int] = None,
) -> Optional[str]:
    """Read keystrokes until one in valid_responses arrives; return it.

    If timeout_seconds elapses first, returns default_response (which
    may be None).  Uses SIGALRM, so this only works on the main thread
    of a POSIX system.

    Fixes: PEP 484 implicit-Optional keyword annotations; the local
    exception no longer shadows the builtin TimeoutError; `response`
    can no longer be unbound (NameError) when we time out with no
    default; and the accepted keystroke is returned after the loop
    instead of invalid keystrokes leaking out of it.
    """
    class _Timeout(Exception):
        # Internal signal-to-exception bridge.
        pass

    def _handle_timeout(signum, frame) -> None:
        raise _Timeout()

    def _single_keystroke_response_internal(
        valid_responses: List[str], timeout_seconds=None
    ) -> str:
        if timeout_seconds is not None:
            signal.signal(signal.SIGALRM, _handle_timeout)
            signal.alarm(timeout_seconds)
        try:
            while True:
                response = readchar.readchar()
                if response in valid_responses:
                    break
            return response
        finally:
            if timeout_seconds is not None:
                signal.alarm(0)  # always cancel the pending alarm

    if prompt is not None:
        print(prompt, end="")
        sys.stdout.flush()
    response = default_response  # fallback if we time out
    try:
        response = _single_keystroke_response_internal(
            valid_responses, timeout_seconds
        )
    except _Timeout:
        pass
    if prompt is not None:
        print(response)
    return response
+
+
def yn_response(prompt: Optional[str] = None, *, timeout_seconds=None) -> str:
    """Ask a yes/no question; return the lowercased 'y' or 'n' pressed.

    Fixes the PEP 484 implicit-Optional annotation (prompt: str = None).
    """
    return single_keystroke_response(
        ["y", "n", "Y", "N"], prompt=prompt, timeout_seconds=timeout_seconds
    ).lower()
+
+
def keystroke_helper() -> None:
    """Interactive debugging aid: echo each keystroke's character
    code(s) until ^C (code 3) is pressed."""
    print("Watching for keystrokes; ^C to quit.")
    while True:
        key = readchar.readkey()
        if len(key) == 1:
            print(f'That was "{key}" ({ord(key)}).')
            if ord(key) == 3:
                # ^C: exit the helper.
                return
        else:
            # Multi-byte sequence (arrow keys, function keys, etc.).
            print(f'That was sequence "{key}" (', end="")
            for _ in key:
                print(f" {ord(_)} ", end="")
            print(")")
--- /dev/null
+#!/usr/bin/env python3
+
+"""Utilities for dealing with the smart lights."""
+
+from abc import ABC, abstractmethod
+import datetime
+import json
+import logging
+import os
+import re
+import subprocess
+import sys
+from typing import Dict, List, Optional, Set
+
+import argparse_utils
+import config
+import logical_search
+import logging_utils
+import google_assistant as goog
+from decorator_utils import timeout, memoized
+
+logger = logging.getLogger(__name__)
+
# Command-line flags locating the helper script and device config file
# that the light utilities depend on.
parser = config.add_commandline_args(
    f"Light Utils ({__file__})",
    "Args related to light utilities."
)
parser.add_argument(
    '--light_utils_tplink_location',
    default='/home/scott/bin/tplink.py',
    metavar='FILENAME',
    help='The location of the tplink.py helper',
    type=argparse_utils.valid_filename,
)
parser.add_argument(
    '--light_utils_network_mac_addresses_location',
    default='/home/scott/bin/network_mac_addresses.txt',
    metavar='FILENAME',
    help='The location of network_mac_addresses.txt',
    type=argparse_utils.valid_filename,
)
+
+
@timeout(
    5.0, use_signals=False, error_message="Timed out waiting for tplink.py"
)
def tplink_light_command(command: str) -> bool:
    """Run a tplink.py command via the shell; True iff it exited 0.

    Decodes the os.system() status word: low byte is the signal that
    killed the child (if any); the next byte is its exit code.
    """
    status = os.system(command)
    killed_by = status & 0xFF
    if killed_by != 0:
        logging_utils.hlog("%s died with signal %d" % (command, killed_by))
        return False
    exit_code = status >> 8
    if exit_code != 0:
        logging_utils.hlog("%s failed, exit %d" % (command, exit_code))
        return False
    return True
+
+
class Light(ABC):
    """Abstract base class for a smart light device identified by a
    name, a MAC address, and a whitespace-separated keyword string."""

    def __init__(self, name: str, mac: str, keywords: str = "") -> None:
        self.name = name.strip()
        self.mac = mac.strip()
        self.keywords = keywords.strip()
        self.kws = keywords.split()

    def get_name(self) -> str:
        """The light's (stripped) human-readable name."""
        return self.name

    def get_mac(self) -> str:
        """The light's (stripped) MAC address."""
        return self.mac

    @abstractmethod
    def turn_on(self) -> bool:
        pass

    @abstractmethod
    def turn_off(self) -> bool:
        pass

    @abstractmethod
    def set_dimmer_level(self, level: int) -> bool:
        pass

    @abstractmethod
    def make_color(self, color: str) -> bool:
        pass

    def get_keywords(self) -> List[str]:
        """All keywords configured for this light."""
        return self.kws

    def has_keyword(self, keyword: str) -> bool:
        """True iff keyword is one of this light's keywords."""
        return any(kw == keyword for kw in self.kws)
+
+
class GoogleLight(Light):
    """A light controlled by issuing natural-language commands to the
    Google Assistant."""

    def __init__(self, name: str, mac: str, keywords: str = "") -> None:
        super().__init__(name, mac, keywords)

    def goog_name(self) -> str:
        """The name the Google Assistant knows this light by."""
        return self.get_name().replace("_", " ")

    @staticmethod
    def parse_google_response(response: goog.GoogleResponse) -> bool:
        """Reduce a bridge response to a simple success flag."""
        return response.success

    def turn_on(self) -> bool:
        answer = goog.ask_google(f"turn {self.goog_name()} on")
        return GoogleLight.parse_google_response(answer)

    def turn_off(self) -> bool:
        answer = goog.ask_google(f"turn {self.goog_name()} off")
        return GoogleLight.parse_google_response(answer)

    def set_dimmer_level(self, level: int) -> bool:
        """Set brightness as a percentage; False if level is out of range."""
        if not 0 <= level <= 100:
            return False
        answer = goog.ask_google(f"set {self.goog_name()} to {level} percent")
        return GoogleLight.parse_google_response(answer)

    def make_color(self, color: str) -> bool:
        answer = goog.ask_google(f"make {self.goog_name()} {color}")
        return GoogleLight.parse_google_response(answer)
+
+
class TPLinkLight(Light):
    """A light controlled by shelling out to the tplink.py helper,
    addressed by MAC.  Power strips expose sub-devices ("children")."""

    def __init__(self, name: str, mac: str, keywords: str = "") -> None:
        super().__init__(name, mac, keywords)
        self.children: List[str] = []          # child device ids, if any
        self.info: Optional[Dict] = None       # last parsed sysinfo blob
        self.info_ts: Optional[datetime.datetime] = None  # when info was fetched
        if "children" in self.keywords:
            self.info = self.get_info()
            if self.info is not None:
                for child in self.info["children"]:
                    self.children.append(child["id"])

    @memoized
    def get_tplink_name(self) -> Optional[str]:
        """The device's own alias as reported by its sysinfo, or None."""
        self.info = self.get_info()
        if self.info is not None:
            return self.info["alias"]
        return None

    def get_cmdline(self, child: Optional[str] = None) -> str:
        """Base tplink.py command line for this device (and optional
        child), ending with a trailing space for appending arguments."""
        cmd = (
            f"{config.config['light_utils_tplink_location']} -m {self.mac} "
            f"--no_logging_console "
        )
        if child is not None:
            cmd += f"-x {child} "
        return cmd

    def get_children(self) -> List[str]:
        """Child device ids (empty for non-strip devices)."""
        return self.children

    def command(
        self, cmd: str, child: Optional[str] = None, extra_args: Optional[str] = None
    ) -> bool:
        """Run a tplink.py -c subcommand; True iff it exited cleanly."""
        cmd = self.get_cmdline(child) + f"-c {cmd}"
        if extra_args is not None:
            cmd += f" {extra_args}"
        return tplink_light_command(cmd)

    def turn_on(self, child: Optional[str] = None) -> bool:
        return self.command("on", child)

    def turn_off(self, child: Optional[str] = None) -> bool:
        return self.command("off", child)

    def make_color(self, color: str) -> bool:
        # TP-Link devices driven via tplink.py have no color support here.
        raise NotImplementedError

    @timeout(
        10.0, use_signals=False, error_message="Timed out waiting for tplink.py"
    )
    def get_info(self) -> Optional[Dict]:
        """Query and parse the device's sysinfo; caches it (and a
        timestamp) on self, returning None on any parse failure."""
        cmd = self.get_cmdline() + "-c info"
        out = subprocess.getoutput(cmd)
        # Strip the helper's Sent:/Received: chatter, leaving raw JSON.
        out = re.sub("Sent:.*\n", "", out)
        out = re.sub("Received: *", "", out)
        try:
            self.info = json.loads(out)["system"]["get_sysinfo"]
            self.info_ts = datetime.datetime.now()
            return self.info
        except Exception as e:
            logger.exception(e)
            print(out, file=sys.stderr)
            self.info = None
            self.info_ts = None
            return None

    def get_on_duration_seconds(self, child: Optional[str] = None) -> int:
        """Seconds the device (or the given child) has been on; 0 if
        unknown."""
        self.info = self.get_info()
        if child is None:
            if self.info is None:
                return 0
            return int(self.info.get("on_time", "0"))
        else:
            if self.info is None:
                return 0
            for chi in self.info.get("children", {}):
                if chi["id"] == child:
                    return int(chi.get("on_time", "0"))
            return 0

    def get_on_limit_seconds(self) -> Optional[int]:
        """Max on-time in seconds from a "timeout:<minutes>" keyword,
        or None if no such keyword is configured."""
        for kw in self.kws:
            m = re.search(r"timeout:(\d+)", kw)
            if m is not None:
                return int(m.group(1)) * 60
        return None

    def set_dimmer_level(self, level: int) -> bool:
        """Set brightness via raw JSON; False if the device lacks the
        "dimmer" keyword."""
        if not self.has_keyword("dimmer"):
            return False
        cmd = (
            self.get_cmdline()
            + f'-j \'{{"smartlife.iot.dimmer":{{"set_brightness":{{"brightness":{level} }} }} }}\''
        )
        return tplink_light_command(cmd)
+
+
class LightingConfig(object):
    """Representation of the smart light device config.

    Parses a config file of "mac, name, keywords" lines (with #-comments)
    into per-mac/per-name lookup tables and a logical_search.Corpus so
    devices can be found by keyword expressions.  Only lines whose
    keywords include "perm" are indexed.
    """

    def __init__(
        self,
        config_file: Optional[str] = None,
    ) -> None:
        """Read and index the device config.

        config_file: path to the device list; defaults to the
        light_utils_network_mac_addresses_location config setting.
        """
        if config_file is None:
            config_file = config.config[
                'light_utils_network_mac_addresses_location'
            ]
        self.macs_by_name = {}
        self._keywords_by_name = {}
        self.keywords_by_mac = {}
        self.names_by_mac = {}
        self.corpus = logical_search.Corpus()
        with open(config_file, "r") as f:
            contents = f.readlines()
        for line in contents:
            line = line.rstrip("\n")
            line = re.sub(r"#.*$", r"", line)  # strip comments
            line = line.strip()
            if line == "":
                continue
            (mac, name, keywords) = line.split(",")
            mac = mac.strip()
            name = name.strip()
            keywords = keywords.strip()
            if "perm" not in keywords:
                continue

            # Split keywords into plain tags and key:value properties.
            # Note: fixed a bug here; the name property used to be
            # appended twice.
            properties = [("name", name)]
            tags = set()
            for kw in keywords.split():
                if ":" in kw:
                    # Split on the first ':' only so values containing
                    # ':' don't raise ValueError.
                    key, value = kw.split(":", 1)
                    properties.append((key, value))
                else:
                    tags.add(kw)
            self.macs_by_name[name] = mac
            self._keywords_by_name[name] = keywords
            self.keywords_by_mac[mac] = keywords
            self.names_by_mac[mac] = name
            self.corpus.add_doc(
                logical_search.Document(
                    docid=mac,
                    tags=tags,
                    properties=properties,
                    reference=None,
                )
            )

    def __repr__(self) -> str:
        s = "Known devices:\n"
        for name, keywords in self._keywords_by_name.items():
            mac = self.macs_by_name[name]
            s += f"  {name} ({mac}) => {keywords}\n"
        return s

    def get_keywords_by_name(self, name: str) -> Optional[str]:
        """Return the raw keyword string for an exactly-named device."""
        return self._keywords_by_name.get(name, None)

    def get_macs_by_name(self, name: str) -> Set[str]:
        """Return macs of all devices whose name contains `name`."""
        retval = set()
        for (mac, lname) in self.names_by_mac.items():
            if name in lname:
                retval.add(mac)
        return retval

    def get_macs_by_keyword(self, keyword: str) -> Set[str]:
        """Return macs of all devices whose keywords contain `keyword`."""
        retval = set()
        for (mac, keywords) in self.keywords_by_mac.items():
            if keyword in keywords:
                retval.add(mac)
        return retval

    def get_light_by_name(self, name: str) -> Optional[Light]:
        """Return a Light object for an exactly-named device, or None."""
        if name in self.macs_by_name:
            return self.get_light_by_mac(self.macs_by_name[name])
        return None

    def get_all_lights(self) -> List[Light]:
        """Return Light objects for every known device."""
        retval = []
        for mac in self.keywords_by_mac:
            if mac is not None:
                light = self.get_light_by_mac(mac)
                if light is not None:
                    retval.append(light)
        return retval

    def get_light_by_mac(self, mac: str) -> Optional[Light]:
        """Return a Light for the mac: TPLinkLight if the device's
        keywords mention tplink, else GoogleLight.  None if unknown."""
        if mac in self.keywords_by_mac:
            name = self.names_by_mac[mac]
            kws = self.keywords_by_mac[mac]
            if "tplink" in kws.lower():
                return TPLinkLight(name, mac, kws)
            else:
                return GoogleLight(name, mac, kws)
        return None

    def query(self, query: str) -> List[Light]:
        """Evaluates a lighting query expression formed of keywords to search
        for, logical operators (and, or, not), and parenthesis.
        Returns a list of matching lights.
        """
        retval = []
        results = self.corpus.query(query)
        if results is not None:
            for mac in results:
                if mac is not None:
                    light = self.get_light_by_mac(mac)
                    if light is not None:
                        retval.append(light)
        return retval
--- /dev/null
+#!/usr/bin/env python3
+
+from itertools import islice
+from typing import Any, Iterator, List
+
+
def shard(lst: List[Any], size: int) -> Iterator[Any]:
    """Yield successive size-sized shards from lst.

    The last shard may be shorter than size.  Uses plain list slicing
    rather than islice: islice(lst, x, x + size) re-walks the list from
    index 0 for every shard, which is accidentally O(n^2) overall.
    """
    for start in range(0, len(lst), size):
        yield lst[start:start + size]
+
+
def flatten(lst: List[Any]) -> List[Any]:
    """Flatten out a list:

    >>> flatten([ 1, [2, 3, 4, [5], 6], 7, [8, [9]]])
    [1, 2, 3, 4, 5, 6, 7, 8, 9]

    """
    if len(lst) == 0:
        return lst

    # Iterative worklist instead of recursion: pull items off the
    # front; sublists are spliced back onto the front of the worklist.
    result: List[Any] = []
    pending: List[Any] = list(lst)
    while pending:
        item = pending.pop(0)
        if isinstance(item, list):
            pending = item + pending
        else:
            result.append(item)
    return result
--- /dev/null
+#!/usr/bin/env python3
+
+"""Utilities related to logging."""
+
+import contextlib
+import logging
+from logging.handlers import RotatingFileHandler, SysLogHandler
+import os
+import sys
+
+import argparse_utils
+import config
+import string_utils as su
+import thread_utils as tu
+
# Command-line flags (registered with the project config module) that
# control how initialize_logging() below configures handlers.
parser = config.add_commandline_args(
    f'Logging ({__file__})',
    'Args related to logging')
# If given, the whole setup is delegated to a fileConfig-style config
# file and all other flags are ignored.
parser.add_argument(
    '--logging_config_file',
    type=argparse_utils.valid_filename,
    default=None,
    metavar='FILENAME',
    help='Config file containing the logging setup, see: https://docs.python.org/3/howto/logging.html#logging-advanced-tutorial',
)
parser.add_argument(
    '--logging_level',
    type=str,
    default='INFO',
    choices=['NOTSET', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
    metavar='LEVEL',
    help='The level below which to squelch log messages.',
)
parser.add_argument(
    '--logging_format',
    type=str,
    default='%(levelname)s:%(asctime)s: %(message)s',
    help='The format for lines logged via the logger module.'
)
parser.add_argument(
    '--logging_date_format',
    type=str,
    default='%Y/%m/%dT%H:%M:%S%z',
    metavar='DATEFMT',
    help='The format of any dates in --logging_format.'
)
parser.add_argument(
    '--logging_console',
    action=argparse_utils.ActionNoYes,
    default=True,
    help='Should we log to the console (stderr)',
)
# File logging: a rotating logfile bounded by maxsize/count.
parser.add_argument(
    '--logging_filename',
    type=str,
    default=None,
    metavar='FILENAME',
    help='The filename of the logfile to write.'
)
parser.add_argument(
    '--logging_filename_maxsize',
    type=int,
    default=(1024*1024),
    metavar='#BYTES',
    help='The maximum size (in bytes) to write to the logging_filename.'
)
parser.add_argument(
    '--logging_filename_count',
    type=int,
    default=2,
    metavar='COUNT',
    help='The number of logging_filename copies to keep before deleting.'
)
parser.add_argument(
    '--logging_syslog',
    action=argparse_utils.ActionNoYes,
    default=False,
    help='Should we log to localhost\'s syslog.'
)
parser.add_argument(
    '--logging_debug_threads',
    action=argparse_utils.ActionNoYes,
    default=False,
    help='Should we prepend pid/tid data to all log messages?'
)
parser.add_argument(
    '--logging_info_is_print',
    action=argparse_utils.ActionNoYes,
    default=False,
    help='logging.info also prints to stdout.'
)
+
+
class OnlyInfoFilter(logging.Filter):
    """A logging filter that admits records at exactly INFO level and
    rejects everything else (used for the --logging_info_is_print
    stdout handler)."""

    def filter(self, record):
        """Return True iff this record should be emitted."""
        return logging.INFO == record.levelno
+
+
def initialize_logging(logger=None) -> logging.Logger:
    """Configure logging handlers (syslog / rotating file / console /
    info-to-stdout) on the given logger per the --logging_* flags.

    logger: the logger to configure; defaults to the root logger.
    Returns the configured logger.  Requires that config args have
    already been parsed (asserted).
    """
    assert config.has_been_parsed()
    if logger is None:
        logger = logging.getLogger()       # Root logger

    if config.config['logging_config_file'] is not None:
        # Delegate all setup to the user's logging config file.
        # Bug fixes: logging.config is a submodule that must be
        # imported explicitly, and the configured filename (not a
        # hardcoded 'logging.conf') must be used.
        import logging.config
        logging.config.fileConfig(config.config['logging_config_file'])
        return logger

    handlers = []
    numeric_level = getattr(
        logging,
        config.config['logging_level'].upper(),
        None
    )
    if not isinstance(numeric_level, int):
        raise ValueError('Invalid level: %s' % config.config['logging_level'])

    fmt = config.config['logging_format']
    if config.config['logging_debug_threads']:
        # Prepend "pid.tid|" to every logged line.
        fmt = f'%(process)d.%(thread)d|{fmt}'

    if config.config['logging_syslog']:
        if sys.platform in ('win32', 'cygwin'):
            print(
                "WARNING: Current platform does not support syslog; IGNORING.",
                file=sys.stderr
            )
        else:
            handler = SysLogHandler()
#            for k, v in encoded_priorities.items():
#                handler.encodePriority(k, v)
            handler.setFormatter(
                logging.Formatter(
                    fmt=fmt,
                    datefmt=config.config['logging_date_format'],
                )
            )
            handler.setLevel(numeric_level)
            handlers.append(handler)

    if config.config['logging_filename'] is not None:
        handler = RotatingFileHandler(
            config.config['logging_filename'],
            maxBytes = config.config['logging_filename_maxsize'],
            backupCount = config.config['logging_filename_count'],
        )
        handler.setLevel(numeric_level)
        handler.setFormatter(
            logging.Formatter(
                fmt=fmt,
                datefmt=config.config['logging_date_format'],
            )
        )
        handlers.append(handler)

    if config.config['logging_console']:
        handler = logging.StreamHandler(sys.stderr)
        handler.setLevel(numeric_level)
        handler.setFormatter(
            logging.Formatter(
                fmt=fmt,
                datefmt=config.config['logging_date_format'],
            )
        )
        handlers.append(handler)

    if len(handlers) == 0:
        handlers.append(logging.NullHandler())

    for handler in handlers:
        logger.addHandler(handler)

    # Optionally mirror INFO-level messages to stdout (unformatted).
    if config.config['logging_info_is_print']:
        handler = logging.StreamHandler(sys.stdout)
        handler.addFilter(OnlyInfoFilter())
        logger.addHandler(handler)
    logger.setLevel(numeric_level)
    logger.propagate = False
    return logger
+
+
def get_logger(name: str = ""):
    """Fetch the named logger (root by default) and run it through
    initialize_logging before returning it."""
    named_logger = logging.getLogger(name)
    return initialize_logging(named_logger)
+
+
def tprint(*args, **kwargs) -> None:
    """Thread-debug print: when --logging_debug_threads is enabled,
    print args prefixed with the current thread id.

    NOTE: when the flag is disabled this prints NOTHING at all (the
    original code's `else: pass` made that explicit); it is a debug
    aid, not a general print replacement.
    """
    if config.config['logging_debug_threads']:
        print(f'{tu.current_thread_id()}', end="")
        print(*args, **kwargs)
+
+
def dprint(*args, **kwargs) -> None:
    """Print to stderr; otherwise identical to print().  Passing a
    file= kwarg is an error (it would duplicate the argument)."""
    print(*args, file=sys.stderr, **kwargs)
+
+
class OutputSink(object):
    """Fans printed output out to any combination of destinations
    (stdout, stderr, logger levels, a file, syslog-via-hlog) selected
    by a bitvector."""

    # Bits in the destination_bitv bitvector.  Used to indicate the
    # output destination.
    STDOUT = 0x1
    STDERR = 0x2
    LOG_DEBUG = 0x4          # -\
    LOG_INFO = 0x8           #  |
    LOG_WARNING = 0x10       #   > Should provide logger to the c'tor.
    LOG_ERROR = 0x20         #  |
    LOG_CRITICAL = 0x40      # _/
    FILENAME = 0x80          # Must provide a filename to the c'tor.
    HLOG = 0x100

    ALL_LOG_DESTINATIONS = (
        LOG_DEBUG | LOG_INFO | LOG_WARNING | LOG_ERROR | LOG_CRITICAL
    )
    ALL_OUTPUT_DESTINATIONS = 0x1FF

    def __init__(self,
                 destination_bitv: int,
                 *,
                 logger=None,
                 filename=None):
        """destination_bitv: OR of the class bit constants above.
        logger: target for the LOG_* bits (root logger by default).
        filename: target for the FILENAME bit; opened unbuffered in
        binary mode.  Raises ValueError if FILENAME is set without a
        filename.
        """
        if logger is None:
            logger = logging.getLogger(None)
        self.logger = logger

        if filename is not None:
            self.f = open(filename, "wb", buffering=0)
        else:
            # Bug fix: test the *parameter*; self.destination_bitv
            # isn't assigned until set_destination_bitv below, so the
            # old code raised AttributeError instead of ValueError.
            if destination_bitv & OutputSink.FILENAME:
                raise ValueError(
                    "Filename argument is required if bitv & FILENAME"
                )
            self.f = None
        self.set_destination_bitv(destination_bitv)

    def get_destination_bitv(self):
        """Return the current destination bitvector."""
        return self.destination_bitv

    def set_destination_bitv(self, destination_bitv: int):
        """Change where subsequent print() calls are routed."""
        if destination_bitv & self.FILENAME and self.f is None:
            raise ValueError(
                "Filename argument is required if bitv & FILENAME"
            )
        self.destination_bitv = destination_bitv

    def print(self, *args, **kwargs):
        """print()-alike that writes to every enabled destination.
        Only sep= and end= keyword arguments are accepted."""
        end = kwargs.pop("end", None)
        if end is not None:
            if not isinstance(end, str):
                raise TypeError("end must be None or a string")
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        # Render args once (no trailing end) so the file/logger
        # destinations get the same text as the console ones.
        buf = su.sprintf(*args, end="", sep=sep)
        if sep is None:
            sep = " "
        if end is None:
            end = "\n"
        if self.destination_bitv & self.STDOUT:
            print(buf, file=sys.stdout, sep=sep, end=end)
        if self.destination_bitv & self.STDERR:
            print(buf, file=sys.stderr, sep=sep, end=end)
        if end == '\n':
            buf += '\n'
        if self.destination_bitv & self.FILENAME and self.f is not None:
            self.f.write(buf.encode('utf-8'))
            self.f.flush()

        # Loggers/syslog get the text with ANSI escapes stripped.
        buf = su.strip_escape_sequences(buf)
        if self.logger is not None:
            if self.destination_bitv & self.LOG_DEBUG:
                self.logger.debug(buf)
            if self.destination_bitv & self.LOG_INFO:
                self.logger.info(buf)
            if self.destination_bitv & self.LOG_WARNING:
                self.logger.warning(buf)
            if self.destination_bitv & self.LOG_ERROR:
                self.logger.error(buf)
            if self.destination_bitv & self.LOG_CRITICAL:
                self.logger.critical(buf)
        if self.destination_bitv & self.HLOG:
            hlog(buf)

    def close(self):
        """Close the output file, if one was opened."""
        if self.f is not None:
            self.f.close()
+
+
class OutputContext(OutputSink, contextlib.ContextDecorator):
    """An OutputSink that can be used as a context manager (or, via
    ContextDecorator, as a function decorator); closes its file on
    exit and never suppresses exceptions."""

    def __init__(self,
                 destination_bitv: int,
                 *,
                 logger=None,
                 filename=None):
        super().__init__(destination_bitv, logger=logger, filename=filename)

    def __enter__(self):
        return self

    def __exit__(self, etype, value, traceback):
        super().close()
        # False when an exception is in flight -> propagate it.
        return etype is None
+
+
def hlog(message: str) -> None:
    """Write a message to the local syslog at local7.info via the
    system logger(1) binary.

    Uses an argv list with subprocess (no shell), so the message needs
    no quote-escaping and cannot be interpreted by a shell; errors from
    the logger binary are ignored, matching the old os.system behavior.
    """
    import subprocess
    subprocess.run(
        ["/usr/bin/logger", "-p", "local7.info", "--", message],
        check=False,
    )
--- /dev/null
+#!/usr/bin/env python3
+
+from __future__ import annotations
+
+from collections import defaultdict
+import enum
+import sys
+from typing import (
+ Any,
+ Dict,
+ List,
+ NamedTuple,
+ Optional,
+ Set,
+ Sequence,
+ Tuple,
+ Union,
+)
+
+
class ParseError(Exception):
    """An error encountered while parsing a logical search expression."""

    def __init__(self, message: str):
        # Chain to Exception so str(e) / repr(e) carry the message
        # (previously str(e) was empty).
        super().__init__(message)
        self.message = message
+
+
class Document(NamedTuple):
    """A tuple representing a searchable document."""

    docid: str  # a unique identifier for the document
    tags: Set[str]  # an optional set of tags
    properties: List[
        Tuple[str, str]
    ]  # an optional set of key->value properties
    reference: Any  # an optional reference to something else
+
+
class Operation(enum.Enum):
    """A logical search query operation."""

    QUERY = 1
    CONJUNCTION = 2
    DISJUNCTION = 3
    INVERSION = 4

    @staticmethod
    def from_token(token: str):
        """Map a query token ("not"/"and"/"or") to its Operation;
        returns None for anything else."""
        if token == "not":
            return Operation.INVERSION
        if token == "and":
            return Operation.CONJUNCTION
        if token == "or":
            return Operation.DISJUNCTION
        return None

    def num_operands(self) -> Optional[int]:
        """How many operands this operation consumes; None for QUERY."""
        if self is Operation.INVERSION:
            return 1
        if self is Operation.CONJUNCTION or self is Operation.DISJUNCTION:
            return 2
        return None
+
+
class Corpus(object):
    """A collection of searchable documents."""

    def __init__(self) -> None:
        # tag -> set of docids carrying that tag
        self.docids_by_tag: Dict[str, Set[str]] = defaultdict(set)
        # (key, value) -> set of docids with that exact property
        self.docids_by_property: Dict[Tuple[str, str], Set[str]] = defaultdict(
            set
        )
        # key -> set of docids having that property key (any value)
        self.docids_with_property: Dict[str, Set[str]] = defaultdict(set)
        # docid -> the Document itself
        self.documents_by_docid: Dict[str, Document] = {}

    def add_doc(self, doc: Document) -> None:
        """Add a new Document to the Corpus.  Each Document must have a
        distinct docid that will serve as its primary identifier.  If
        the same Document is added multiple times, only the most
        recent addition is indexed.  If two distinct documents with
        the same docid are added, the latter clobbers the former in the
        indexes.

        Each Document may have an optional set of tags which can be
        used later in expressions to the query method.

        Each Document may have an optional list of key->value tuples
        which can be used later in expressions to the query method.

        Document includes a user-defined "reference" field which is
        never interpreted by this module.  This is meant to allow easy
        mapping between Documents in this corpus and external objects
        they may represent.
        """

        if doc.docid in self.documents_by_docid:
            # Handle collisions; assume that we are re-indexing the
            # same document so remove it from the indexes before
            # adding it back again.
            colliding_doc = self.documents_by_docid[doc.docid]
            assert colliding_doc.docid == doc.docid
            del self.documents_by_docid[doc.docid]
            for tag in colliding_doc.tags:
                self.docids_by_tag[tag].remove(doc.docid)
            for key, value in colliding_doc.properties:
                self.docids_by_property[(key, value)].remove(doc.docid)
                self.docids_with_property[key].remove(doc.docid)

        # Index the new Document
        assert doc.docid not in self.documents_by_docid
        self.documents_by_docid[doc.docid] = doc
        for tag in doc.tags:
            self.docids_by_tag[tag].add(doc.docid)
        for key, value in doc.properties:
            self.docids_by_property[(key, value)].add(doc.docid)
            self.docids_with_property[key].add(doc.docid)

    def get_docids_by_exact_tag(self, tag: str) -> Set[str]:
        """Return the set of docids that have a particular tag."""

        return self.docids_by_tag[tag]

    def get_docids_by_searching_tags(self, tag: str) -> Set[str]:
        """Return the set of docids with a tag that contains a str"""

        ret = set()
        for search_tag in self.docids_by_tag:
            if tag in search_tag:
                for docid in self.docids_by_tag[search_tag]:
                    ret.add(docid)
        return ret

    def get_docids_with_property(self, key: str) -> Set[str]:
        """Return the set of docids that have a particular property no matter
        what that property's value.
        """

        return self.docids_with_property[key]

    def get_docids_by_property(self, key: str, value: str) -> Set[str]:
        """Return the set of docids that have a particular property with a
        particular value.
        """

        return self.docids_by_property[(key, value)]

    def invert_docid_set(self, original: Set[str]) -> Set[str]:
        """Invert a set of docids (all docids not in the given set)."""

        return set(
            [
                docid
                for docid in self.documents_by_docid.keys()
                if docid not in original
            ]
        )

    def get_doc(self, docid: str) -> Optional[Document]:
        """Given a docid, retrieve the previously added Document."""

        return self.documents_by_docid.get(docid, None)

    def query(self, query: str) -> Optional[Set[str]]:
        """Query the corpus for documents that match a logical expression.
        Returns a (potentially empty) set of docids for the matching
        (previously added) documents or None on error.

        e.g.

        tag1 and tag2 and not tag3

        (tag1 or tag2) and (tag3 or tag4)

        (tag1 and key2:value2) or (tag2 and key1:value1)

        key:*

        tag1 and key:*
        """

        try:
            root = self._parse_query(query)
        except ParseError as e:
            print(e.message, file=sys.stderr)
            return None
        return root.eval()

    def _parse_query(self, query: str):
        """Internal parse helper; prefer to use query instead.

        Converts the infix expression to postfix with the
        shunting-yard algorithm, then builds a Node tree from the
        postfix token stream.
        """

        parens = set(["(", ")"])
        and_or = set(["and", "or"])

        def operator_precedence(token: str) -> Optional[int]:
            table = {
                "(": 4,  # higher
                ")": 4,
                "not": 3,
                "and": 2,
                "or": 1,  # lower
            }
            return table.get(token, None)

        def is_operator(token: str) -> bool:
            return operator_precedence(token) is not None

        def lex(query: str):
            query = query.lower()
            tokens = query.split()
            for token in tokens:
                # Handle ( and ) operators stuck to the ends of tokens
                # that split() doesn't understand.  Loop so that
                # multiple adjacent parens (e.g. "((tag1" in a query
                # like "((tag1 or tag2) and tag3)") all detach; the
                # old code only peeled one from each end.
                while len(token) > 1 and token[0] in parens:
                    yield token[0]
                    token = token[1:]
                trailing = []
                while len(token) > 1 and token[-1] in parens:
                    trailing.append(token[-1])
                    token = token[0:-1]
                yield token
                yield from reversed(trailing)

        def evaluate(corpus: Corpus, stack: List[str]):
            node_stack: List[Node] = []
            for token in stack:
                node = None
                if not is_operator(token):
                    node = Node(corpus, Operation.QUERY, [token])
                else:
                    args = []
                    operation = Operation.from_token(token)
                    operand_count = operation.num_operands()
                    if len(node_stack) < operand_count:
                        raise ParseError(
                            f"Incorrect number of operations for {operation}"
                        )
                    for _ in range(operand_count):
                        args.append(node_stack.pop())
                    node = Node(corpus, operation, args)
                node_stack.append(node)
            if not node_stack:
                # e.g. the empty query; previously crashed with
                # IndexError instead of a clean ParseError.
                raise ParseError("Empty query expression")
            return node_stack[0]

        output_stack = []
        operator_stack = []
        for token in lex(query):
            if not is_operator(token):
                output_stack.append(token)
                continue

            # token is an operator...
            if token == "(":
                operator_stack.append(token)
            elif token == ")":
                ok = False
                while len(operator_stack) > 0:
                    pop_operator = operator_stack.pop()
                    if pop_operator != "(":
                        output_stack.append(pop_operator)
                    else:
                        ok = True
                        break
                if not ok:
                    raise ParseError(
                        "Unbalanced parenthesis in query expression"
                    )

            # and, or, not
            else:
                my_precedence = operator_precedence(token)
                if my_precedence is None:
                    raise ParseError(f"Unknown operator: {token}")
                while len(operator_stack) > 0:
                    peek_operator = operator_stack[-1]
                    if not is_operator(peek_operator) or peek_operator == "(":
                        break
                    peek_precedence = operator_precedence(peek_operator)
                    if peek_precedence is None:
                        raise ParseError("Internal error")
                    if (
                        (peek_precedence < my_precedence)
                        or (peek_precedence == my_precedence)
                        and (peek_operator not in and_or)
                    ):
                        break
                    output_stack.append(operator_stack.pop())
                operator_stack.append(token)
        while len(operator_stack) > 0:
            token = operator_stack.pop()
            if token in parens:
                raise ParseError("Unbalanced parenthesis in query expression")
            output_stack.append(token)
        return evaluate(self, output_stack)
+
+
class Node(object):
    """A query AST node."""

    def __init__(
        self,
        corpus: Corpus,
        op: Operation,
        operands: Sequence[Union[Node, str]],
    ):
        # corpus: consulted at eval() time to resolve tags/properties.
        # op: the operation this node performs.
        # operands: child Nodes (subexpressions) and/or raw query
        # token strings (for QUERY leaves).
        self.corpus = corpus
        self.op = op
        self.operands = operands

    def eval(self) -> Set[str]:
        """Evaluate this node.

        Recursively evaluates child nodes to docid sets, then applies
        this node's operation; returns the resulting set of docids.
        Raises ParseError on malformed operands.
        """

        # Evaluate child Nodes first; raw string tokens are passed
        # through for the QUERY branch below.
        evaled_operands: List[Union[Set[str], str]] = []
        for operand in self.operands:
            if isinstance(operand, Node):
                evaled_operands.append(operand.eval())
            elif isinstance(operand, str):
                evaled_operands.append(operand)
            else:
                raise ParseError(f"Unexpected operand: {operand}")

        retval = set()
        if self.op is Operation.QUERY:
            # Leaf: a plain tag ("tag1"), an exact property
            # ("key:value"), or a property-exists wildcard ("key:*").
            for tag in evaled_operands:
                if isinstance(tag, str):
                    if ":" in tag:
                        try:
                            key, value = tag.split(":")
                        except ValueError as v:
                            # More than one ':' in the token.
                            raise ParseError(
                                f'Invalid key:value syntax at "{tag}"'
                            ) from v
                        if value == "*":
                            r = self.corpus.get_docids_with_property(key)
                        else:
                            r = self.corpus.get_docids_by_property(key, value)
                    else:
                        r = self.corpus.get_docids_by_exact_tag(tag)
                    retval.update(r)
                else:
                    raise ParseError(f"Unexpected query {tag}")
        elif self.op is Operation.DISJUNCTION:
            # "or" == set union of the two operand sets.
            if len(evaled_operands) != 2:
                raise ParseError(
                    "Operation.DISJUNCTION (or) expects two operands."
                )
            retval.update(evaled_operands[0])
            retval.update(evaled_operands[1])
        elif self.op is Operation.CONJUNCTION:
            # "and" == set intersection of the two operand sets.
            if len(evaled_operands) != 2:
                raise ParseError(
                    "Operation.CONJUNCTION (and) expects two operands."
                )
            retval.update(evaled_operands[0])
            retval = retval.intersection(evaled_operands[1])
        elif self.op is Operation.INVERSION:
            # "not" == complement against all docids in the corpus.
            if len(evaled_operands) != 1:
                raise ParseError(
                    "Operation.INVERSION (not) expects one operand."
                )
            _ = evaled_operands[0]
            if isinstance(_, set):
                retval.update(self.corpus.invert_docid_set(_))
            else:
                raise ParseError(f"Unexpected negation operand {_} ({type(_)})")
        return retval
--- /dev/null
+#!/usr/bin/env python3
+
+import math
+from typing import List
+from heapq import heappush, heappop
+
+
class RunningMedian:
    """Track the median of a stream of numbers using two heaps: a
    max-heap (stored negated) holding the lower half and a min-heap
    holding the upper half, kept within one element of each other."""

    def __init__(self):
        self.lowers = []   # max-heap of the lower half (values negated)
        self.highers = []  # min-heap of the upper half

    def add_number(self, number):
        """Insert number into the appropriate half, then rebalance."""
        if self.highers and number <= self.highers[0]:
            heappush(self.lowers, -number)  # belongs in the lower half
        else:
            heappush(self.highers, number)
        self.rebalance()

    def rebalance(self):
        """Move one element across if the halves differ by more than one."""
        imbalance = len(self.lowers) - len(self.highers)
        if imbalance > 1:
            heappush(self.highers, -heappop(self.lowers))
        elif imbalance < -1:
            heappush(self.lowers, -heappop(self.highers))

    def get_median(self):
        """Return the current median; the mean of the two middle values
        when the count is even."""
        if len(self.lowers) > len(self.highers):
            return -self.lowers[0]
        if len(self.highers) > len(self.lowers):
            return self.highers[0]
        return (-self.lowers[0] + self.highers[0]) / 2
+
+
def gcd_floats(a: float, b: float) -> float:
    """Approximate GCD of two floats via Euclid's algorithm, stopping
    when the remainder drops below the 0.001 tolerance."""
    if a < b:
        a, b = b, a
    # Iterative Euclid: replace (a, b) with (b, a mod b) until b ~ 0.
    while abs(b) >= 0.001:
        a, b = b, a - math.floor(a / b) * b
    return a
+
+
def gcd_float_sequence(lst: List[float]) -> float:
    """Approximate GCD of a whole sequence of floats by folding
    gcd_floats across it.  Raises on an empty sequence."""
    if not lst:
        raise Exception("Need at least one number")
    result = lst[0]
    for value in lst[1:]:
        result = gcd_floats(result, value)
    return result
+
+
def truncate_float(n: float, decimals: int = 2):
    """Truncate (not round) a float to a particular number of decimals.

    decimals must be in 1..9; raises ValueError otherwise.  (This was
    previously an assert, which silently disappears under python -O.)
    """
    if not 0 < decimals < 10:
        raise ValueError(f"decimals must be between 1 and 9, got {decimals}")
    multiplier = 10 ** decimals
    # int() truncates toward zero, so this chops digits rather than
    # rounding.
    return int(n * multiplier) / multiplier
+
+
def is_prime(n: int) -> bool:
    """Returns True if n is prime and False otherwise"""
    if not isinstance(n, int):
        raise TypeError("argument passed to is_prime is not of 'int' type")

    # Corner cases.
    if n <= 1:
        return False
    if n <= 3:
        return True
    if n % 2 == 0 or n % 3 == 0:
        return False

    # All primes > 3 are of the form 6k +/- 1, so test candidate
    # divisors 5, 11, 17, ... (and each +2) up to sqrt(n).
    for i in range(5, math.isqrt(n) + 1, 6):
        if n % i == 0 or n % (i + 2) == 0:
            return False
    return True
--- /dev/null
+#!/usr/bin/env python3
+
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+import datetime
+import glob
+import logging
+import os
+import pickle
+import random
+import sys
+from types import SimpleNamespace
+from typing import Any, List, NamedTuple, Optional, Set, Tuple
+
+import numpy as np
+from sklearn.model_selection import train_test_split # type:ignore
+from sklearn.preprocessing import MinMaxScaler # type: ignore
+
+from ansi import bold, reset
+import argparse_utils
+import config
+import datetime_utils
+import decorator_utils
+import input_utils
+import list_utils
+import parallelize as par
+import smart_future
+import string_utils
+import text_utils
+
logger = logging.getLogger(__file__)

# Command-line flags controlling the trainer's prompting, dry-run and
# model-persistence behavior; these feed InputSpec.populate_from_config.
parser = config.add_commandline_args(
    f"ML Model Trainer ({__file__})",
    "Arguments related to training an ML model"
)
parser.add_argument(
    "--ml_trainer_quiet",
    action="store_true",
    help="Don't prompt the user for anything."
)
parser.add_argument(
    "--ml_trainer_delete",
    action="store_true",
    help="Delete invalid/incomplete features files in addition to warning."
)
# dry_run and predicate are mutually exclusive: either never persist,
# or persist automatically on a good-enough test score.
group = parser.add_mutually_exclusive_group()
group.add_argument(
    "--ml_trainer_dry_run",
    action="store_true",
    help="Do not write a new model, just report efficacy.",
)
group.add_argument(
    "--ml_trainer_predicate",
    type=argparse_utils.valid_percentage,
    metavar='0..100',
    help="Persist the model if the test set score is >= this predicate.",
)
+
+
class InputSpec(SimpleNamespace):
    """Parameters describing one training run.  As a SimpleNamespace,
    fields are set ad hoc by callers; the annotations below document
    the expected shape (populate_from_config fills in only the four
    config-driven ones)."""

    file_glob: str  # glob matching the feature files to read
    feature_count: int  # required number of features per example
    features_to_skip: Set[str]  # feature keys to ignore while reading
    key_value_delimiter: str  # separator in "key<delim>value" lines
    training_parameters: List  # one model is trained per entry
    label: str  # the feature key whose value is the training label
    basename: str  # prefix for persisted scaler/model/info filenames
    dry_run: Optional[bool]  # never persist artifacts when True
    quiet: Optional[bool]  # suppress progress output and prompts
    persist_predicate: Optional[float]  # min test score (%) to auto-persist
    delete_bad_inputs: Optional[bool]  # os.remove() malformed feature files

    @staticmethod
    def populate_from_config() -> InputSpec:
        """Build a partial InputSpec from the --ml_trainer_* flags."""
        return InputSpec(
            dry_run = config.config["ml_trainer_dry_run"],
            quiet = config.config["ml_trainer_quiet"],
            persist_predicate = config.config["ml_trainer_predicate"],
            delete_bad_inputs = config.config["ml_trainer_delete"],
        )
+
+
class OutputSpec(NamedTuple):
    """Results of a training run: filenames of persisted artifacts
    (None when nothing was written) plus the best model's scores."""

    model_filename: Optional[str]  # pickled best model, if persisted
    model_info_filename: Optional[str]  # human-readable summary, if persisted
    scaler_filename: Optional[str]  # pickled MinMaxScaler, if persisted
    training_score: float  # best model's training-set score (percent)
    test_score: float  # best model's test-set score (percent)
+
+
class TrainingBlueprint(ABC):
    """Abstract pipeline for training sklearn-style models: read
    feature files, split and scale the data, train one model per
    parameter set (subclasses implement train_model), evaluate them
    all, and optionally persist the best scaler/model/info files."""

    def __init__(self):
        self.y_train = None
        self.y_test = None
        self.X_test_scaled = None
        self.X_train_scaled = None
        self.file_done_count = 0
        self.total_file_count = 0
        self.spec = None

    def train(self, spec: InputSpec) -> OutputSpec:
        """Run the full pipeline described by spec; returns an
        OutputSpec with the best model's scores and any persisted
        filenames."""
        random.seed()
        self.spec = spec

        X_, y_ = self.read_input_files()
        num_examples = len(y_)

        # Every example's features
        X = np.array(X_)

        # Every example's label
        y = np.array(y_)

        print("Doing random test/train split...")
        X_train, X_test, self.y_train, self.y_test = self.test_train_split(
            X,
            y,
        )

        print("Scaling training data...")
        scaler, self.X_train_scaled, self.X_test_scaled = self.scale_data(
            X_train,
            X_test,
        )

        print("Training model(s)...")
        models = []
        modelid_to_params = {}
        for params in self.spec.training_parameters:
            model = self.train_model(
                params,
                self.X_train_scaled,
                self.y_train
            )
            models.append(model)
            modelid_to_params[model.get_id()] = str(params)

        best_model = None
        best_score = None
        best_test_score = None
        best_training_score = None
        best_params = None
        for model in smart_future.wait_many(models):
            params = modelid_to_params[model.get_id()]
            if isinstance(model, smart_future.SmartFuture):
                model = model._resolve()
            training_score, test_score = self.evaluate_model(
                model,
                self.X_train_scaled,
                self.y_train,
                self.X_test_scaled,
                self.y_test,
            )
            # Composite score weighted heavily (20:1) toward the test
            # set to discourage overfit models.
            score = (training_score + test_score * 20) / 21
            if not self.spec.quiet:
                print(
                    f"{bold()}{params}{reset()}: "
                    f"Training set score={training_score:.2f}%, "
                    f"test set score={test_score:.2f}%",
                    file=sys.stderr,
                )
            if best_score is None or score > best_score:
                best_score = score
                best_test_score = test_score
                best_training_score = training_score
                best_model = model
                best_params = params
                if not self.spec.quiet:
                    print(
                        f"New best score {best_score:.2f}% with params {params}"
                    )

        if not self.spec.quiet:
            msg = f"Done training; best test set score was: {best_test_score:.1f}%"
            print(msg)
            logger.info(msg)
        scaler_filename, model_filename, model_info_filename = (
            self.maybe_persist_scaler_and_model(
                best_training_score,
                best_test_score,
                best_params,
                num_examples,
                scaler,
                best_model,
            )
        )
        return OutputSpec(
            model_filename = model_filename,
            model_info_filename = model_info_filename,
            scaler_filename = scaler_filename,
            training_score = best_training_score,
            test_score = best_test_score,
        )

    @par.parallelize(method=par.Method.THREAD)
    def read_files_from_list(
        self,
        files: List[str],
        n: int
    ) -> Tuple[List, List]:
        """Read one shard of feature files; returns (features, labels)
        for the valid files in the shard.  Runs on a thread pool; n is
        the shard number."""
        # All features
        X = []

        # The label
        y = []

        for filename in files:
            wrote_label = False
            with open(filename, "r") as f:
                lines = f.readlines()

            # This example's features
            x = []
            for line in lines:

                # We expect lines in features files to be of the form:
                #
                #     key: value
                line = line.strip()
                try:
                    (key, value) = line.split(self.spec.key_value_delimiter)
                except Exception as e:
                    logger.exception(e)
                    print(f"WARNING: bad line '{line}', skipped")
                    continue

                key = key.strip()
                value = value.strip()
                if (self.spec.features_to_skip is not None
                        and key in self.spec.features_to_skip):
                    logger.debug(f"Skipping feature {key}")
                    continue

                value = self.normalize_feature(value)

                if key == self.spec.label:
                    y.append(value)
                    wrote_label = True
                else:
                    x.append(value)

            # Make sure we saw a label and the requisite number of features.
            if len(x) == self.spec.feature_count and wrote_label:
                X.append(x)
                self.file_done_count += 1
            else:
                # Un-append the label so X and y stay aligned.
                if wrote_label:
                    y.pop()

                # Bug fix: these used to be f-strings with no
                # placeholder that printed literally "(unknown)" even
                # though the offending filename is in scope.
                if self.spec.delete_bad_inputs:
                    msg = f"WARNING: {filename}: missing features or label.  DELETING."
                    print(msg, file=sys.stderr)
                    logger.warning(msg)
                    os.remove(filename)
                else:
                    msg = f"WARNING: {filename}: missing features or label.  Skipped."
                    print(msg, file=sys.stderr)
                    logger.warning(msg)
        return (X, y)

    def make_progress_graph(self) -> None:
        """Render a progress bar of files read so far (unless quiet)."""
        if not self.spec.quiet:
            text_utils.progress_graph(self.file_done_count,
                                      self.total_file_count)

    @decorator_utils.timed
    def read_input_files(self):
        """Read every file matching spec.file_glob (in parallel, 500
        files per shard); returns (X, y)."""
        # All features
        X = []

        # The label
        y = []

        results = []
        all_files = glob.glob(self.spec.file_glob)
        self.total_file_count = len(all_files)
        for n, files in enumerate(list_utils.shard(all_files, 500)):
            file_list = list(files)
            results.append(self.read_files_from_list(file_list, n))

        # Collect shard results as they finish, updating the progress
        # graph via callback.
        for result in smart_future.wait_many(results, callback=self.make_progress_graph):
            result = result._resolve()
            for z in result[0]:
                X.append(z)
            for z in result[1]:
                y.append(z)
        if not self.spec.quiet:
            print(" " * 80 + "\n")
        return (X, y)

    def normalize_feature(self, value: str) -> Any:
        """Map a raw feature string to an int: False/None -> 0,
        True -> 255, floats -> round(value * 100), else int(value)."""
        if value in ("False", "None"):
            ret = 0
        elif value == "True":
            ret = 255
        elif isinstance(value, str) and "." in value:
            ret = round(float(value) * 100.0)
        else:
            ret = int(value)
        return ret

    def test_train_split(self, X, y) -> List:
        """Randomly split examples into train and test sets."""
        logger.debug("Performing test/train split")
        return train_test_split(
            X,
            y,
            random_state=random.randrange(0, 1000),
        )

    def scale_data(self,
                   X_train: np.ndarray,
                   X_test: np.ndarray) -> Tuple[Any, np.ndarray, np.ndarray]:
        """Fit a MinMaxScaler on the training data and apply it to
        both sets; returns (scaler, scaled_train, scaled_test)."""
        logger.debug("Scaling data")
        scaler = MinMaxScaler()
        scaler.fit(X_train)
        return (scaler, scaler.transform(X_train), scaler.transform(X_test))

    # Note: children should implement.  Consider using @parallelize.
    @abstractmethod
    def train_model(self,
                    parameters,
                    X_train_scaled: np.ndarray,
                    y_train: np.ndarray) -> Any:
        pass

    def evaluate_model(
            self,
            model: Any,
            X_train_scaled: np.ndarray,
            y_train: np.ndarray,
            X_test_scaled: np.ndarray,
            y_test: np.ndarray) -> Tuple[np.float64, np.float64]:
        """Score a trained model; returns (training %, test %)."""
        logger.debug("Evaluating the model")
        training_score = model.score(X_train_scaled, y_train) * 100.0
        test_score = model.score(X_test_scaled, y_test) * 100.0
        logger.info(
            f"Model evaluation results: test_score={test_score:.5f}, "
            f"train_score={training_score:.5f}"
        )
        return (training_score, test_score)

    def maybe_persist_scaler_and_model(
            self,
            training_score: np.float64,
            test_score: np.float64,
            params: str,
            num_examples: int,
            scaler: Any,
            model: Any) -> Tuple[Optional[str], Optional[str], Optional[str]]:
        """Pickle the scaler and model (plus a text info file) if we're
        not in dry-run mode and either the persist predicate is met or
        the user says yes; returns the three filenames or Nones."""
        if not self.spec.dry_run:
            if (
                    (self.spec.persist_predicate is not None and
                     test_score > self.spec.persist_predicate)
                    or
                    (not self.spec.quiet
                     and input_utils.yn_response("Write the model? [y,n]: ") == "y")
            ):
                scaler_filename = f"{self.spec.basename}_scaler.sav"
                with open(scaler_filename, "wb") as f:
                    pickle.dump(scaler, f)
                msg = f"Wrote {scaler_filename}"
                print(msg)
                logger.info(msg)
                model_filename = f"{self.spec.basename}_model.sav"
                with open(model_filename, "wb") as f:
                    pickle.dump(model, f)
                msg = f"Wrote {model_filename}"
                print(msg)
                logger.info(msg)
                model_info_filename = f"{self.spec.basename}_model_info.txt"
                now: datetime.datetime = datetime_utils.now_pst()
                info = f"""Timestamp: {datetime_utils.datetime_to_string(now)}
Model params: {params}
Training examples: {num_examples}
Training set score: {training_score:.2f}%
Testing set score: {test_score:.2f}%"""
                with open(model_info_filename, "w") as f:
                    f.write(info)
                msg = f"Wrote {model_info_filename}:"
                print(msg)
                logger.info(msg)
                print(string_utils.indent(info, 2))
                logger.info(info)
                return (scaler_filename, model_filename, model_info_filename)
        return (None, None, None)
--- /dev/null
+#!/usr/bin/env python3
+
+import glob
+import os
+from typing import Callable, List, NamedTuple, Set
+
+import argparse_utils
+import config
+import input_utils
+
# Register this module's flags with the shared config system; values are
# available via config.config after config.parse() runs.
parser = config.add_commandline_args(
    f"ML Quick Labeler ({__file__})",
    "Args related to quick labeling of ML training data",
)
parser.add_argument(
    "--ml_quick_label_skip_list_path",
    default="./qlabel_skip_list.txt",
    metavar="FILENAME",
    type=argparse_utils.valid_filename,
    help="Path to file in which to store already labeled data",
)
+
+
class InputSpec(NamedTuple):
    """Describes one quick-labeling session."""

    image_file_glob: str  # glob matching the images to be labeled
    image_file_to_features_file: Callable[[str], str]  # image path -> features path
    label: str  # feature-file key written for each labeled image
    valid_keystrokes: List[str]  # keystrokes accepted at the prompt
    prompt: str  # prompt shown to the human labeler
    keystroke_to_label: Callable[[str], str]  # keystroke -> label value
+
+
def read_skip_list() -> Set[str]:
    """Return the set of already-labeled image paths.

    Reads one path per line from --ml_quick_label_skip_list_path and
    returns an empty set when the file does not exist yet.
    """
    ret: Set[str] = set()
    quick_skip_file = config.config['ml_quick_label_skip_list_path']
    if not os.path.exists(quick_skip_file):
        return ret
    with open(quick_skip_file, 'r') as f:
        for line in f:
            # Bug fix: the old code called line.strip() and discarded the
            # result (a no-op), storing paths with stray whitespace.
            line = line.strip()
            if line:
                ret.add(line)
    return ret
+
+
def write_skip_list(skip_list) -> None:
    """Persist the set of already-labeled image paths, one per line."""
    quick_skip_file = config.config['ml_quick_label_skip_list_path']
    with open(quick_skip_file, 'w') as f:
        for filename in skip_list:
            filename = filename.strip()
            if len(filename) > 0:
                # Bug fix: previously this wrote a literal placeholder
                # string instead of the filename, corrupting the skip
                # list for every subsequent run.
                f.write(f'{filename}\n')
+
+
def label(in_spec: InputSpec) -> None:
    """Interactively label images matched by in_spec.image_file_glob.

    For each image with a features file that does not already contain
    in_spec.label: display the image (via xv), read one keystroke, and
    append "label: value" to the features file.  Labeled or skipped
    images are remembered in the skip list so reruns don't revisit them.
    """
    images = glob.glob(in_spec.image_file_glob)

    skip_list = read_skip_list()
    for image in images:
        if image in skip_list:
            continue
        features = in_spec.image_file_to_features_file(image)
        if features is None or not os.path.exists(features):
            continue

        # Render features and image.
        with open(features, "r") as f:
            lines = f.readlines()
        # Already labeled?  Remember it in the skip list and move on.
        skip = False
        for line in lines:
            line = line[:-1]
            if in_spec.label in line:
                skip = True
        if skip:
            skip_list.add(image)
            continue

        # NOTE(review): the image path is interpolated into a shell
        # command unquoted -- fine for trusted local paths only.
        os.system(f'xv {image} &')
        keystroke = input_utils.single_keystroke_response(
            in_spec.valid_keystrokes,
            prompt=in_spec.prompt,
        )
        os.system('killall xv')

        label_value = in_spec.keystroke_to_label(keystroke)
        with open(features, "a") as f:
            f.write(f"{in_spec.label}: {label_value}\n")
        skip_list.add(image)

    write_skip_list(skip_list)
--- /dev/null
+#!/usr/bin/env python3
+
+"""Utilities related to changing the orb's color."""
+
+import os
+
+import config
+
+
# Register this module's flags with the shared config system.
parser = config.add_commandline_args(
    f"Orb Utils ({__file__})",
    "Args having to do with controlling Scott's Orb.",
)
parser.add_argument(
    "--orb_utils_file_location",
    default="/Users/scott/orb_color",
    metavar="FILENAME",
    type=str,
    help="The location of the orb file on whatever machine is hosting it."
)
parser.add_argument(
    "--orb_utils_user_machine",
    default="scott@cheetah.house",
    metavar="USER@HOSTNAME",
    type=str,
    help="The username/machine combo that is hosting the orb."
)
+
+
def make_orb(color: str) -> None:
    """Set the orb's color by writing *color* into the orb file over ssh.

    NOTE(review): color and both config values are interpolated into a
    shell command unescaped; a value containing quotes or shell
    metacharacters would break out of the remote echo.  Acceptable for
    trusted local config, but worth quoting (e.g. shlex.quote) if these
    inputs can ever vary -- confirm.
    """
    user_machine = config.config['orb_utils_user_machine']
    orbfile_path = config.config['orb_utils_file_location']
    os.system(
        f"ssh {user_machine} 'echo \"{color}\" > {orbfile_path}'"
    )
--- /dev/null
+#!/usr/bin/env python3
+
+"""A decorator to help with dead simple parallelization."""
+
+from enum import Enum
+import functools
+import typing
+
+import executors
+import smart_future
+
# Per-pool dispatch counters.
# NOTE(review): nothing in this module reads or updates these; they
# appear to be unused leftovers -- confirm before removing.
ps_count = 0
thread_count = 0
remote_count = 0
+
+
class Method(Enum):
    """Where @parallelize should run the wrapped function."""

    THREAD = 1   # background thread in this process
    PROCESS = 2  # background process on this machine
    REMOTE = 3   # process on a remote machine pool
+
+
def parallelize(
    _funct: typing.Optional[typing.Callable] = None,
    *,
    method: Method = Method.THREAD
) -> typing.Callable:
    """Usage:

        @parallelize  # defaults to thread-mode
        def my_function(a, b, c) -> int:
            ...do some slow / expensive work, e.g., an http request

        @parallelize(method=Method.PROCESS)
        def my_other_function(d, e, f) -> str:
            ...do more really expensive work, e.g., a network read

        @parallelize(method=Method.REMOTE)
        def my_other_other_function(g, h) -> int:
            ...this work will be distributed to a remote machine pool

    This decorator will invoke the wrapped function on:

        Method.THREAD (default): a background thread
        Method.PROCESS: a background process
        Method.REMOTE: a process on a remote host

    The wrapped function returns immediately with a value that is
    wrapped in a SmartFuture.  This value will block if it is either
    read directly (via a call to result._resolve) or indirectly (by
    using the result in an expression, printing it, hashing it,
    passing it a function argument, etc...).  See comments on the
    SmartFuture class for details.

    Note: you may stack @parallelized methods and it will "work".
    That said, having multiple layers of Method.PROCESS or
    Method.REMOTE may prove to be problematic because each process in
    the stack will use its own independent pool which may overload
    your machine with processes or your network with remote processes
    beyond the control mechanisms built into one instance of the pool.
    Be careful.
    """
    def wrapper(funct: typing.Callable):

        @functools.wraps(funct)
        def inner_wrapper(*args, **kwargs):
            # Look for as of yet unresolved arguments in _funct's
            # argument list and resolve them now.
            newargs = [smart_future.SmartFuture.resolve(arg) for arg in args]
            newkwargs = {
                kw: smart_future.SmartFuture.resolve(value)
                for kw, value in kwargs.items()
            }

            # Pick the executor pool matching the requested method.
            if method == Method.PROCESS:
                executor = executors.DefaultExecutors().process_pool()
            elif method == Method.THREAD:
                executor = executors.DefaultExecutors().thread_pool()
            elif method == Method.REMOTE:
                executor = executors.DefaultExecutors().remote_pool()
            else:
                # Was an assert; raise instead so the check survives -O.
                raise ValueError(f'Unknown parallelization method: {method}')

            future = executor.submit(funct, *newargs, **newkwargs)

            # Wrap the future that's returned in a SmartFuture object
            # so that callers do not need to call .result(), they can
            # just use it as normal.
            return smart_future.SmartFuture(future)

        return inner_wrapper

    # Bare @parallelize vs. @parallelize(method=...) invocation.
    if _funct is None:
        return wrapper
    else:
        return wrapper(_funct)
--- /dev/null
+#!/usr/bin/env python3
+
+import random
+import sys
+
+import bootstrap
+import parallelize as p
+import decorator_utils
+import executors
+import math_utils
+
+
@p.parallelize(method=p.Method.REMOTE)
def list_primes(n):
    """Return a list of booleans, the primality of each integer in [2, n).

    (Doc fix: the old docstring claimed this summed the primes below n;
    it actually returns per-integer primality flags.)
    """
    return [math_utils.is_prime(x) for x in range(2, n)]
+
+
@decorator_utils.timed
def driver() -> None:
    """Fire off 200 remote primality jobs; print a prime count per job."""
    results = {}
    for _ in range(200):
        n = random.randint(0, 100000)
        results[n] = list_primes(n)
        total = 0
        for flag in results[n]:
            total += flag
        print(total)
+
+
@bootstrap.initialize
def main() -> None:
    """Run the demo driver, then shut each executor pool down cleanly."""
    print(driver())
    executors.DefaultExecutors().process_pool().shutdown()
    executors.DefaultExecutors().thread_pool().shutdown()
    executors.DefaultExecutors().remote_pool().shutdown()
    sys.exit(0)
+
+
+if __name__ == '__main__':
+ main()
+
+# print """Usage: python sum_primes.py [ncpus]
+# [ncpus] - the number of workers to run in parallel,
+# if omitted it will be set to the number of processors in the system
+# """
+
+# # tuple of all parallel python servers to connect with
+# ppservers = ()
+# #ppservers = ("10.0.0.1",)
+
+# if len(sys.argv) > 1:
+# ncpus = int(sys.argv[1])
+# # Creates jobserver with ncpus workers
+# job_server = pp.Server(ncpus, ppservers=ppservers)
+# else:
+# # Creates jobserver with automatically detected number of workers
+# job_server = pp.Server(ppservers=ppservers)
+
+# print "Starting pp with", job_server.get_ncpus(), "workers"
+
+# # Submit a job of calulating sum_primes(100) for execution.
+# # sum_primes - the function
+# # (100,) - tuple with arguments for sum_primes
+# # (isprime,) - tuple with functions on which function sum_primes depends
+# # ("math",) - tuple with module names which must be imported before sum_primes execution
+# # Execution starts as soon as one of the workers will become available
+# job1 = job_server.submit(sum_primes, (100,), (isprime,), ("math",))
+
+# # Retrieves the result calculated by job1
+# # The value of job1() is the same as sum_primes(100)
+# # If the job has not been finished yet, execution will wait here until result is available
+# result = job1()
+
+# print "Sum of primes below 100 is", result
+
+# start_time = time.time()
+
+# # The following submits 8 jobs and then retrieves the results
+# inputs = (100000, 100100, 100200, 100300, 100400, 100500, 100600, 100700)
+# jobs = [(input, job_server.submit(sum_primes,(input,), (isprime,), ("math",))) for input in inputs]
+# for input, job in jobs:
+# print "Sum of primes below", input, "is", job()
+
+# print "Time elapsed: ", time.time() - start_time, "s"
+# job_server.print_stats()
+
+# # Parallel Python Software: http://www.parallelpython.com
--- /dev/null
+#!/usr/bin/env python3
+
+import datetime
+from collections import defaultdict
+import enum
+import logging
+import re
+import sys
+from typing import Dict, List
+
+import argparse_utils
+import bootstrap
+import config
+import dict_utils
+import exec_utils
+
+logger = logging.getLogger(__name__)
+
# Register this module's flags with the shared config system.
cfg = config.add_commandline_args(
    f"Presence Detection ({__file__})",
    "Args related to detection of human beings in locations.",
)
cfg.add_argument(
    "--presence_macs_file",
    type=argparse_utils.valid_filename,
    default = "/home/scott/cron/persisted_mac_addresses.txt",
    metavar="FILENAME",
    help="The location of persisted_mac_addresses.txt to use."
)
+
+
class Person(enum.Enum):
    """People we try to locate.

    NOTE(review): AARON_AND_DANA, AARON and DANA share value 4, making
    AARON and DANA aliases of the combined member (their devices are
    tracked as one set); this is presumably why the enum is not
    @enum.unique -- confirm this aliasing is intentional.
    """

    UNKNOWN = 0
    SCOTT = 1
    LYNN = 2
    ALEX = 3
    AARON_AND_DANA = 4
    AARON = 4
    DANA = 4
+
+
@enum.unique
class Location(enum.Enum):
    """Places a person can be detected."""

    UNKNOWN = 0
    HOUSE = 1
    CABIN = 2
+
+
class PresenceDetection(object):
    """Infers which location each known person is in based on where
    their devices' MAC addresses were most recently seen on the network.
    """

    def __init__(self) -> None:
        """Loads the local (HOUSE) persisted MAC file and fetches the
        CABIN's over ssh.  Note: construction blocks on that ssh call.
        """
        # Note: list most important devices first.
        self.devices_by_person: Dict[Person, List[str]] = {
            Person.SCOTT: [
                "3C:28:6D:10:6D:41",
                "D4:61:2E:88:18:09",
                "6C:40:08:AE:DC:2E",
                "14:7D:DA:6A:20:D7",
            ],
            Person.LYNN: [
                "08:CC:27:63:26:14",
                "B8:31:B5:9A:4F:19",
            ],
            Person.ALEX: [
                "0C:CB:85:0C:8B:AE",
                "D0:C6:37:E3:36:9A",
            ],
            Person.AARON_AND_DANA: [
                "98:B6:E9:E5:5A:7C",
                "D6:2F:37:CA:B2:9B",
                "6C:E8:5C:ED:17:26",
                "90:E1:7B:13:7C:E5",
                "6E:DC:7C:75:02:1B",
                "B2:16:1A:93:7D:50",
                "18:65:90:DA:3A:35",
                "22:28:C8:7D:3C:85",
                "B2:95:23:69:91:F8",
                "96:69:2C:88:7A:C3",
            ],
        }
        # location -> {mac -> last-seen timestamp}
        self.location_ts_by_mac: Dict[
            Location, Dict[str, datetime.datetime]
        ] = defaultdict(dict)
        # mac -> friendly host name parsed from the scan output
        self.names_by_mac: Dict[str, str] = {}
        persisted_macs = config.config['presence_macs_file']
        self.read_persisted_macs_file(persisted_macs, Location.HOUSE)
        # NOTE(review): the remote user/host/path are hard-coded while
        # the local path is configurable -- consider promoting to flags.
        raw = exec_utils.cmd(
            "ssh scott@meerkat.cabin 'cat /home/scott/cron/persisted_mac_addresses.txt'"
        )
        self.parse_raw_macs_file(raw, Location.CABIN)
        # os.remove(filename)

    def read_persisted_macs_file(
        self, filename: str, location: Location
    ) -> None:
        """Read a persisted MAC-address file and fold it into state.
        No-op for Location.UNKNOWN.
        """
        if location is Location.UNKNOWN:
            return
        with open(filename, "r") as rf:
            lines = rf.read()
        self.parse_raw_macs_file(lines, location)

    def parse_raw_macs_file(self, raw: str, location: Location) -> None:
        """Parse persisted-MAC file text for location, recording per-MAC
        last-seen timestamps and (when present) friendly host names.
        Malformed lines are logged and skipped.
        """
        lines = raw.split("\n")

        # CC:F4:11:D7:FA:EE, 2240, 10.0.0.22 (side_deck_high_home), Google, 1611681990
        for line in lines:
            line = line.strip()
            if len(line) == 0:
                continue
            logger.debug(f'{location}> {line}')
            try:
                (mac, count, ip_name, mfg, ts) = line.split(",")
            except Exception as e:
                logger.error(f'SKIPPED BAD LINE> {line}')
                logger.exception(e)
                continue
            mac = mac.strip()
            (self.location_ts_by_mac[location])[
                mac
            ] = datetime.datetime.fromtimestamp(int(ts.strip()))
            ip_name = ip_name.strip()
            # e.g. "10.0.0.22 (side_deck_high_home)" -> "side_deck_high_home"
            match = re.match(r"(\d+\.\d+\.\d+\.\d+) +\(([^\)]+)\)", ip_name)
            if match is not None:
                name = match.group(2)
                self.names_by_mac[mac] = name

    def is_anyone_in_location_now(self, location: Location) -> bool:
        """True when any known person's best-guess location is location."""
        for person in Person:
            # NOTE(review): enum members are never None, so this check
            # is always true (iteration also skips the AARON/DANA
            # aliases); the guard looks like dead code -- confirm.
            if person is not None:
                loc = self.where_is_person_now(person)
                if location == loc:
                    return True
        return False

    def where_is_person_now(self, name: Person) -> Location:
        """Best-guess a person's location via weighted voting over their
        devices: each seen device votes for a location (chosen by the
        minimum timestamp in tiebreaks), and devices earlier in the
        person's list carry geometrically more weight.
        """
        if name is Person.UNKNOWN:
            return Location.UNKNOWN
        votes: Dict[Location, int] = {}
        tiebreaks: Dict[Location, datetime.datetime] = {}
        credit = 10000
        for mac in self.devices_by_person[name]:
            if mac not in self.names_by_mac:
                continue
            # NOTE(review): tiebreaks is not cleared between devices, so
            # an earlier device's timestamps influence a later device's
            # vote -- confirm this accumulation is intended.
            for location in self.location_ts_by_mac:
                if mac in self.location_ts_by_mac[location]:
                    ts = (self.location_ts_by_mac[location])[mac]
                    tiebreaks[location] = ts
            location = dict_utils.key_with_min_value(tiebreaks)
            v = votes.get(location, 0)
            votes[location] = v + credit
            # Each subsequent device counts for ~2/3 of the previous one.
            credit = int(
                credit * 0.667
            )  # Note: list most important devices first
            if credit == 0:
                credit = 1
        if len(votes) > 0:
            item = dict_utils.item_with_max_value(votes)
            return item[0]
        return Location.UNKNOWN
+
+
@bootstrap.initialize
def main() -> None:
    """Print presence information for every location and every person."""
    config.parse()
    detector = PresenceDetection()

    for location in Location:
        print(f'{location}: {detector.is_anyone_in_location_now(location)}')

    for person in Person:
        print(f'{person}: {detector.where_is_person_now(person)}')
    sys.exit(0)
+
+
+if __name__ == '__main__':
+ main()
--- /dev/null
+#!/usr/bin/env python3
+
+"""A simple utility to unpickle some code, run it, and pickle the
+results.
+"""
+
+import os
+import platform
+import signal
+import sys
+import threading
+import time
+
+import cloudpickle # type: ignore
+import psutil # type: ignore
+
+import bootstrap
+import config
+from thread_utils import background_thread
+
+
# Register this module's flags with the shared config system.
cfg = config.add_commandline_args(
    f"Remote Worker ({__file__})",
    "Helper to run pickled code remotely and return results",
)
cfg.add_argument(
    '--code_file',
    type=str,
    required=True,
    metavar='FILENAME',
    help='The location of the bundle of code to execute.'
)
cfg.add_argument(
    '--result_file',
    type=str,
    required=True,
    metavar='FILENAME',
    help='The location where we should write the computation results.'
)
+
+
@background_thread
def watch_for_cancel(terminate_event: threading.Event) -> None:
    """Watchdog: kill this process when its ssh ancestor disappears.

    Polls once a second.  If no ancestor process name looks like ssh,
    the remote invoker is gone: dump the process tree for the log and
    send ourselves SIGTERM.  Returns when terminate_event is set.
    """
    me = psutil.Process(os.getpid())
    while True:
        saw_sshd = any(
            'ssh' in ancestor.name() or 'Ssh' in ancestor.name()
            for ancestor in me.parents()
        )
        if not saw_sshd:
            os.system('pstree')
            os.kill(os.getpid(), signal.SIGTERM)
        if terminate_event.is_set():
            return
        time.sleep(1.0)
+
+
@bootstrap.initialize
def main() -> None:
    """Unpickle a code bundle, run it, and pickle the result to disk."""
    hostname = platform.node()

    # The cancel watchdog misbehaves on the Windows-hosted worker, so
    # skip it there.
    watchdog = None
    if hostname != 'VIDEO-COMPUTER':
        watchdog = watch_for_cancel()

    in_file = config.config['code_file']
    out_file = config.config['result_file']

    with open(in_file, 'rb') as rb:
        serialized = rb.read()

    fun, args, kwargs = cloudpickle.loads(serialized)
    ret = fun(*args, **kwargs)

    serialized = cloudpickle.dumps(ret)
    with open(out_file, 'wb') as wb:
        wb.write(serialized)

    if watchdog is not None:
        (thread, terminate_event) = watchdog
        terminate_event.set()
        thread.join()
    sys.exit(0)
+
+
+if __name__ == '__main__':
+ main()
--- /dev/null
+#!/usr/bin/env python3
+
from __future__ import annotations

from collections.abc import Mapping
import concurrent.futures as fut
import time
from typing import Callable, Dict, List, Optional, TypeVar

from deferred_operand import DeferredOperand
import id_generator
+
+T = TypeVar('T')
+
+
def wait_many(futures: List[SmartFuture], *, callback: Optional[Callable] = None):
    """Yield each future from *futures* as it becomes ready.

    Polls the futures round-robin, sleeping 100ms whenever it lands on
    an unready one.  Invokes *callback* (if given) on each idle poll
    and once more just before returning, so it can drive a progress
    display.  Returns after every future has been yielded exactly once.
    """
    # Type fixes: this is a locally-mutated dict (the old Mapping
    # annotation advertised a read-only view) and callback is optional.
    finished: Dict[int, bool] = {}
    x = 0
    while True:
        future = futures[x]
        if not finished.get(future.get_id(), False):
            if future.is_ready():
                finished[future.get_id()] = True
                yield future
            else:
                if callback is not None:
                    callback()
                time.sleep(0.1)
        x += 1
        if x >= len(futures):
            x = 0
            # Completion is only checked at the wrap-around point.
            if len(finished) == len(futures):
                if callback is not None:
                    callback()
                return
+
+
class SmartFuture(DeferredOperand):
    """This is a SmartFuture, a class that wraps a normal Future and can
    then be used, mostly, like a normal (non-Future) identifier.

    Using a FutureWrapper in expressions will block and wait until
    the result of the deferred operation is known.
    """

    def __init__(self, wrapped_future: fut.Future) -> None:
        # The underlying concurrent.futures.Future being wrapped.
        self.wrapped_future = wrapped_future
        # Unique id used by wait_many() for bookkeeping.
        self.id = id_generator.get("smart_future_id")

    def get_id(self) -> int:
        """Return this future's unique id."""
        return self.id

    def is_ready(self) -> bool:
        """True once the wrapped future has completed."""
        return self.wrapped_future.done()

    # You shouldn't have to call this; instead, have a look at defining a
    # method on DeferredOperand base class.
    def _resolve(self, *, timeout=None) -> T:
        # Blocks until the wrapped future's result is available (or the
        # optional timeout elapses).
        return self.wrapped_future.result(timeout)
--- /dev/null
+#!/usr/bin/env python3
+
+from abc import ABC, abstractmethod
+import datetime
+import logging
+import time
+from typing import Dict, Optional
+
+import pytz
+
+from thread_utils import background_thread
+import math_utils
+
+logger = logging.getLogger(__name__)
+
+
class StateTracker(ABC):
    """A base class that maintains and updates a global state via an
    update routine.  Instances of this class should be periodically
    invoked via the heartbeat() method.  This method, in turn, invokes
    update() with update_ids according to a schedule / periodicity
    provided to the c'tor.
    """

    def __init__(self, update_ids_to_update_secs: Dict[str, float]) -> None:
        """The update_ids_to_update_secs dict parameter describes one or more
        update types (unique update_ids) and the periodicity(ies), in
        seconds, at which it/they should be invoked.

        Note that, when more than one update is overdue, they will be
        invoked in order by their update_ids so care in choosing these
        identifiers may be in order.
        """
        self.update_ids_to_update_secs = update_ids_to_update_secs
        # Maps each update_id -> when it last ran (None == never).
        self.last_reminder_ts: Dict[str, Optional[datetime.datetime]] = {
            update_id: None for update_id in update_ids_to_update_secs
        }

    @abstractmethod
    def update(
        self,
        update_id: str,
        now: datetime.datetime,
        last_invocation: Optional[datetime.datetime],
    ) -> None:
        """Put whatever you want here.  The update_id will be the string
        passed to the c'tor as a key in the Dict.  It will only be
        tapped on the shoulder, at most, every update_secs seconds.
        The now param is the approximate current timestamp and the
        last_invocation param is the last time you were invoked (or
        None on the first invocation)
        """
        pass

    def heartbeat(self, *, force_all_updates_to_run: bool = False) -> None:
        """Invoke this method to cause the StateTracker instance to identify
        and invoke any overdue updates based on the schedule passed to
        the c'tor.  In the base StateTracker class, this method must
        be invoked manually with a thread from external code.

        If more than one type of update (update_id) are overdue,
        they will be invoked in order based on their update_ids.

        Setting force_all_updates_to_run will invoke all updates
        (ordered by update_id) immediately ignoring whether or not
        they are due.
        """
        self.now = datetime.datetime.now(tz=pytz.timezone("US/Pacific"))
        for update_id in sorted(self.last_reminder_ts.keys()):
            if force_all_updates_to_run:
                logger.debug('Forcing all updates to run')
                self._invoke_update(update_id)
                continue
            last_run = self.last_reminder_ts[update_id]
            if last_run is None:  # Never run before
                logger.debug(
                    f'id {update_id} has never been run; running it now'
                )
                self._invoke_update(update_id)
            else:
                refresh_secs = self.update_ids_to_update_secs[update_id]
                delta = self.now - last_run
                if delta.total_seconds() >= refresh_secs:  # Is overdue
                    # Bug fix: this message was missing its f-prefix and
                    # logged the literal text '{update_id}'.
                    logger.debug(f'id {update_id} is overdue; running it now')
                    self._invoke_update(update_id)

    def _invoke_update(self, update_id: str) -> None:
        """Run one update and stamp its last-invocation time."""
        self.update(update_id, self.now, self.last_reminder_ts[update_id])
        self.last_reminder_ts[update_id] = self.now
+
+
class AutomaticStateTracker(StateTracker):
    """Just like HeartbeatCurrentState but you don't need to pump the
    heartbeat; it runs on a background thread.  Call .shutdown() to
    terminate the updates.
    """

    @background_thread
    def pace_maker(self, should_terminate) -> None:
        """Entry point for a background thread to own calling heartbeat()
        at regular intervals so that the main thread doesn't need to do
        so."""
        while True:
            if should_terminate.is_set():
                logger.debug('pace_maker noticed event; shutting down')
                return
            self.heartbeat()
            # Bug fix: this message said 'page_maker'.
            logger.debug(f'pace_maker is sleeping for {self.sleep_delay}s')
            time.sleep(self.sleep_delay)

    def __init__(
        self,
        update_ids_to_update_secs: Dict[str, float],
        *,
        override_sleep_delay: Optional[float] = None,
    ) -> None:
        """Starts the background heartbeat thread immediately.  The
        polling interval defaults to the (float) GCD of all update
        periods -- the slowest rate that still hits every deadline --
        unless override_sleep_delay is given.
        """
        super().__init__(update_ids_to_update_secs)
        if override_sleep_delay is not None:
            logger.debug(f'Overriding sleep delay to {override_sleep_delay}')
            self.sleep_delay = override_sleep_delay
        else:
            periods_list = list(update_ids_to_update_secs.values())
            self.sleep_delay = math_utils.gcd_float_sequence(periods_list)
            logger.info(f'Computed sleep_delay={self.sleep_delay}')
        (thread, stop_event) = self.pace_maker()
        self.should_terminate = stop_event
        self.updater_thread = thread

    def shutdown(self):
        """Terminates the background thread and waits for it to tear down.
        This may block for as long as self.sleep_delay.
        """
        logger.debug(
            'Setting shutdown event and waiting for background thread.'
        )
        self.should_terminate.set()
        self.updater_thread.join()
        logger.debug('Background thread terminated.')
--- /dev/null
+#!/usr/bin/env python3
+
+import json
+import random
+import re
+import string
+from typing import Any, List, Optional
+import unicodedata
+from uuid import uuid4
+
# Validation regexes used by the is_* predicates below.

# Decimal numbers: optional sign, optional fraction, optional exponent.
# Fixes: the exponent class was "[e|E]", a character class that also
# (wrongly) accepted a literal '|'; exponents may now carry a sign
# (e.g. "1e-5"), which the old pattern rejected.
NUMBER_RE = re.compile(r"^([+\-]?)((\d+)(\.\d+)?([eE][+\-]?\d+)?|\.\d+)$")

# Fix: sign/prefix classes were written "[+|-]", "[x|X]", etc., all of
# which wrongly matched a literal '|' too.
HEX_NUMBER_RE = re.compile(r"^([+\-]?)0[xX]([0-9A-Fa-f]+)$")

OCT_NUMBER_RE = re.compile(r"^([+\-]?)0[Oo]([0-7]+)$")

BIN_NUMBER_RE = re.compile(r"^([+\-]?)0[Bb]([01]+)$")

URLS_RAW_STRING = (
    r"([a-z-]+://)"  # scheme
    r"([a-z_\d-]+:[a-z_\d-]+@)?"  # user:password
    r"(www\.)?"  # www.
    r"((?<!\.)[a-z\d]+[a-z\d.-]+\.[a-z]{2,6}|\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|localhost)"  # domain
    r"(:\d{2,})?"  # port number
    r"(/[a-z\d_%+-]*)*"  # folders
    r"(\.[a-z\d_%+-]+)*"  # file extension
    r"(\?[a-z\d_+%-=]*)?"  # query string
    r"(#\S*)?"  # hash
)

URL_RE = re.compile(r"^{}$".format(URLS_RAW_STRING), re.IGNORECASE)

URLS_RE = re.compile(r"({})".format(URLS_RAW_STRING), re.IGNORECASE)

ESCAPED_AT_SIGN = re.compile(r'(?!"[^"]*)@+(?=[^"]*")|\\@')

EMAILS_RAW_STRING = r"[a-zA-Z\d._\+\-'`!%#$&*/=\?\^\{\}\|~\\]+@[a-z\d-]+\.?[a-z\d-]+\.[a-z]{2,4}"

EMAIL_RE = re.compile(r"^{}$".format(EMAILS_RAW_STRING))

EMAILS_RE = re.compile(r"({})".format(EMAILS_RAW_STRING))

CAMEL_CASE_TEST_RE = re.compile(
    r"^[a-zA-Z]*([a-z]+[A-Z]+|[A-Z]+[a-z]+)[a-zA-Z\d]*$"
)

CAMEL_CASE_REPLACE_RE = re.compile(r"([a-z]|[A-Z]+)(?=[A-Z])")

SNAKE_CASE_TEST_RE = re.compile(
    r"^([a-z]+\d*_[a-z\d_]*|_+[a-z\d]+[a-z\d_]*)$", re.IGNORECASE
)

SNAKE_CASE_TEST_DASH_RE = re.compile(
    r"([a-z]+\d*-[a-z\d-]*|-+[a-z\d]+[a-z\d-]*)$", re.IGNORECASE
)

SNAKE_CASE_REPLACE_RE = re.compile(r"(_)([a-z\d])")

SNAKE_CASE_REPLACE_DASH_RE = re.compile(r"(-)([a-z\d])")

CREDIT_CARDS = {
    "VISA": re.compile(r"^4\d{12}(?:\d{3})?$"),
    "MASTERCARD": re.compile(r"^5[1-5]\d{14}$"),
    "AMERICAN_EXPRESS": re.compile(r"^3[47]\d{13}$"),
    "DINERS_CLUB": re.compile(r"^3(?:0[0-5]|[68]\d)\d{11}$"),
    "DISCOVER": re.compile(r"^6(?:011|5\d{2})\d{12}$"),
    "JCB": re.compile(r"^(?:2131|1800|35\d{3})\d{11}$"),
}

JSON_WRAPPER_RE = re.compile(
    r"^\s*[\[{]\s*(.*)\s*[\}\]]\s*$", re.MULTILINE | re.DOTALL
)

UUID_RE = re.compile(
    r"^[a-f\d]{8}-[a-f\d]{4}-[a-f\d]{4}-[a-f\d]{4}-[a-f\d]{12}$", re.IGNORECASE
)

UUID_HEX_OK_RE = re.compile(
    r"^[a-f\d]{8}-?[a-f\d]{4}-?[a-f\d]{4}-?[a-f\d]{4}-?[a-f\d]{12}$",
    re.IGNORECASE,
)

SHALLOW_IP_V4_RE = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")

IP_V6_RE = re.compile(r"^([a-z\d]{0,4}:){7}[a-z\d]{0,4}$", re.IGNORECASE)

MAC_ADDRESS_RE = re.compile(
    r"^([0-9A-F]{2}[:-]){5}([0-9A-F]{2})", re.IGNORECASE
)

WORDS_COUNT_RE = re.compile(
    r"\W*[^\W_]+\W*", re.IGNORECASE | re.MULTILINE | re.UNICODE
)

HTML_RE = re.compile(
    r"((<([a-z]+:)?[a-z]+[^>]*/?>)(.*?(</([a-z]+:)?[a-z]+>))?|<!--.*-->|<!doctype.*>)",
    re.IGNORECASE | re.MULTILINE | re.DOTALL,
)

HTML_TAG_ONLY_RE = re.compile(
    r"(<([a-z]+:)?[a-z]+[^>]*/?>|</([a-z]+:)?[a-z]+>|<!--.*-->|<!doctype.*>)",
    re.IGNORECASE | re.MULTILINE | re.DOTALL,
)

SPACES_RE = re.compile(r"\s")

NO_LETTERS_OR_NUMBERS_RE = re.compile(
    r"[^\w\d]+|_+", re.IGNORECASE | re.UNICODE
)

MARGIN_RE = re.compile(r"^[^\S\r\n]+")

# ANSI escape sequences (CSI form).  Fix: the pattern previously used
# "\e", which is not a valid Python regex escape (re raises
# "bad escape \e" on modern Pythons); ESC is \x1b.
ESCAPE_SEQUENCE_RE = re.compile(r"\x1b\[[^A-Za-z]*[A-Za-z]")

# Byte-count multipliers for size suffixes; ordered largest-first so
# number_to_suffix_string picks the biggest fitting unit.
NUM_SUFFIXES = {
    "Pb": (1024 ** 5),
    "P": (1024 ** 5),
    "Tb": (1024 ** 4),
    "T": (1024 ** 4),
    "Gb": (1024 ** 3),
    "G": (1024 ** 3),
    "Mb": (1024 ** 2),
    "M": (1024 ** 2),
    "Kb": (1024 ** 1),
    "K": (1024 ** 1),
}
+
+
def is_none_or_empty(in_str: Optional[str]) -> bool:
    """True when in_str is None, empty, or whitespace-only."""
    if in_str is None:
        return True
    return len(in_str.strip()) == 0
+
+
def is_string(obj: Any) -> bool:
    """Return True when *obj* is a str instance."""
    return isinstance(obj, str)
+
+
def is_empty_string(in_str: Any) -> bool:
    """True only for a str that is empty or whitespace-only."""
    return isinstance(in_str, str) and in_str.strip() == ""
+
+
def is_full_string(in_str: Any) -> bool:
    """True only for a str with at least one non-whitespace character."""
    return isinstance(in_str, str) and in_str.strip() != ""
+
+
def is_number(in_str: str) -> bool:
    """
    Checks if a string is a valid number.

    Raises ValueError when handed a non-string.
    """
    if not isinstance(in_str, str):
        raise ValueError(in_str)
    return NUMBER_RE.match(in_str) is not None
+
+
def is_integer_number(in_str: str) -> bool:
    """
    Checks whether the given string represents an integer or not.

    An integer may be signed or unsigned or use a "scientific notation".

    *Examples:*

    >>> is_integer_number('42')   # returns true
    >>> is_integer_number('42.0') # returns false

    (Doc fix: the examples previously referred to a nonexistent
    is_integer function.)
    """
    return (
        (is_number(in_str) and "." not in in_str) or
        is_hexidecimal_integer_number(in_str) or
        is_octal_integer_number(in_str) or
        is_binary_integer_number(in_str)
    )
+
+
def is_hexidecimal_integer_number(in_str: str) -> bool:
    """True when in_str is a 0x-prefixed hexadecimal integer literal.
    Raises ValueError for non-strings.
    """
    if not isinstance(in_str, str):
        raise ValueError(in_str)
    return HEX_NUMBER_RE.match(in_str) is not None
+
+
def is_octal_integer_number(in_str: str) -> bool:
    """True when in_str is a 0o-prefixed octal integer literal.
    Raises ValueError for non-strings.
    """
    if not isinstance(in_str, str):
        raise ValueError(in_str)
    return OCT_NUMBER_RE.match(in_str) is not None
+
+
def is_binary_integer_number(in_str: str) -> bool:
    """True when in_str is a 0b-prefixed binary integer literal.
    Raises ValueError for non-strings.
    """
    if not isinstance(in_str, str):
        raise ValueError(in_str)
    return BIN_NUMBER_RE.match(in_str) is not None
+
+
def to_int(in_str: str) -> int:
    """Parse a decimal, hex (0x), octal (0o), or binary (0b) integer
    literal into an int.  Raises ValueError for non-strings or
    unparseable text.
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    for predicate, base in (
        (is_binary_integer_number, 2),
        (is_octal_integer_number, 8),
        (is_hexidecimal_integer_number, 16),
    ):
        if predicate(in_str):
            return int(in_str, base)
    return int(in_str)
+
+
def is_decimal_number(in_str: str) -> bool:
    """
    Checks whether the given string represents a decimal or not.

    A decimal may be signed or unsigned or use a "scientific notation".

    >>> is_decimal_number('42.0') # returns true
    >>> is_decimal_number('42')   # returns false

    (Doc fix: the examples previously referred to a nonexistent
    is_decimal function.)
    """
    return is_number(in_str) and "." in in_str
+
+
def strip_escape_sequences(in_str: str) -> str:
    """Return in_str with all ANSI escape sequences removed."""
    return ESCAPE_SEQUENCE_RE.sub("", in_str)
+
+
# Full url example:
# scheme://username:password@www.domain.com:8042/folder/subfolder/file.extension?param=value&param2=value2#hash
def is_url(in_str: Any, allowed_schemes: Optional[List[str]] = None) -> bool:
    """
    Check if a string is a valid url.

    *Examples:*

    >>> is_url('http://www.mysite.com') # returns true
    >>> is_url('https://mysite.com') # returns true
    >>> is_url('.mysite.com') # returns false
    """
    if not is_full_string(in_str):
        return False

    valid = URL_RE.match(in_str) is not None

    if allowed_schemes:
        # Idiom fix: a generator avoids building a throwaway list and
        # short-circuits on the first matching scheme.
        return valid and any(in_str.startswith(s) for s in allowed_schemes)
    return valid
+
+
def is_email(in_str: Any) -> bool:
    """
    Check if a string is a valid email.

    Reference: https://tools.ietf.org/html/rfc3696#section-3

    *Examples:*

    >>> is_email('my.email@the-provider.com') # returns true
    >>> is_email('@gmail.com') # returns false
    """
    # 320 is the maximum overall address length per the RFC 3696 errata.
    if (
        not is_full_string(in_str)
        or len(in_str) > 320
        or in_str.startswith(".")
    ):
        return False

    try:
        # we expect 2 tokens, one before "@" and one after, otherwise
        # we have an exception and the email is not valid.
        head, tail = in_str.split("@")

        # head's size must be <= 64, tail <= 255, head must not start
        # with a dot or contain multiple consecutive dots.
        if (
            len(head) > 64
            or len(tail) > 255
            or head.endswith(".")
            or (".." in head)
        ):
            return False

        # removes escaped spaces, so that later on the test regex will
        # accept the string.
        head = head.replace("\\ ", "")
        if head.startswith('"') and head.endswith('"'):
            head = head.replace(" ", "")[1:-1]
        return EMAIL_RE.match(head + "@" + tail) is not None

    except ValueError:
        # borderline case in which we have multiple "@" signs but the
        # head part is correctly escaped.
        if ESCAPED_AT_SIGN.search(in_str) is not None:
            # replace "@" with "a" in the head
            # (recurses at most once: the substitution removes the
            # extra "@"s that caused the ValueError)
            return is_email(ESCAPED_AT_SIGN.sub("a", in_str))
        return False
+
+
def suffix_string_to_number(in_str: str) -> Optional[int]:
    """Take a string like "33Gb" and convert it into a number (of bytes)
    like 34603008.  Return None if the input string is not valid.
    """

    def suffix_capitalize(s: str) -> str:
        # Normalize a 1-2 char suffix to the "Xy" capitalization used
        # as keys in NUM_SUFFIXES.
        if len(s) == 1:
            return s.upper()
        elif len(s) == 2:
            return f"{s[0].upper()}{s[1].lower()}"
        # NOTE(review): only reachable for len(s) > 2, which the caller
        # below never produces (suffixes are at most 2 chars); note that
        # s[0:1] keeps only the first character -- confirm this
        # truncation is intended before relying on it.
        return suffix_capitalize(s[0:1])

    if is_string(in_str):
        if is_integer_number(in_str):
            return to_int(in_str)
        # Try the last two characters, then the last one, as a suffix.
        suffixes = [in_str[-2:], in_str[-1:]]
        rest = [in_str[:-2], in_str[:-1]]
        for x in range(len(suffixes)):
            s = suffixes[x]
            s = suffix_capitalize(s)
            multiplier = NUM_SUFFIXES.get(s, None)
            if multiplier is not None:
                r = rest[x]
                if is_integer_number(r):
                    return int(r) * multiplier
        return None
+
+
def number_to_suffix_string(num: int) -> Optional[str]:
    """Take a number (of bytes) and returns a string like "43.8Gb".
    Returns none if the input is invalid.
    """
    # NUM_SUFFIXES is ordered largest-first, so the first unit strictly
    # smaller than num wins.
    for suffix, size in NUM_SUFFIXES.items():
        if num > size:
            return f"{num / size:.1f}{suffix}"
    return None
+
+
def is_credit_card(in_str: Any, card_type: Optional[str] = None) -> bool:
    """
    Checks if a string is a valid credit card number.
    If card type is provided then it checks against that specific type only,
    otherwise any known credit card number will be accepted.

    Supported card types are the following:

    - VISA
    - MASTERCARD
    - AMERICAN_EXPRESS
    - DINERS_CLUB
    - DISCOVER
    - JCB

    Raises KeyError for an unknown card_type.
    """
    if not is_full_string(in_str):
        return False

    if card_type is not None:
        # Annotation fix: card_type defaulted to None but was typed as
        # a plain str.
        if card_type not in CREDIT_CARDS:
            raise KeyError(
                f'Invalid card type "{card_type}". Valid types are: {CREDIT_CARDS.keys()}'
            )
        return CREDIT_CARDS[card_type].match(in_str) is not None
    # No type specified: accept a match against any known card pattern.
    return any(
        regex.match(in_str) is not None for regex in CREDIT_CARDS.values()
    )
+
+
def is_camel_case(in_str: Any) -> bool:
    """
    Checks if a string is formatted as camel case.

    A string is considered camel case when:

    - it's composed only by letters ([a-zA-Z]) and optionally numbers ([0-9])
    - it contains both lowercase and uppercase letters
    - it does not start with a number
    """
    if not is_full_string(in_str):
        return False
    return CAMEL_CASE_TEST_RE.match(in_str) is not None
+
+
def is_snake_case(in_str: Any, *, separator: str = "_") -> bool:
    """
    Checks if a string is formatted as "snake case".

    A string is considered snake case when:

    - it's composed only by lowercase/uppercase letters and digits
    - it contains at least one underscore (or provided separator)
    - it does not start with a number
    """
    if not is_full_string(in_str):
        return False
    # Pre-compiled matchers exist for the two common separators.
    known = {"_": SNAKE_CASE_TEST_RE, "-": SNAKE_CASE_TEST_DASH_RE}
    pattern = known.get(separator)
    if pattern is None:
        # Build a matcher for a custom separator on the fly.
        template = (
            r"([a-z]+\d*{sign}[a-z\d{sign}]*|{sign}+[a-z\d]+[a-z\d{sign}]*)"
        )
        pattern = re.compile(
            template.format(sign=re.escape(separator)), re.IGNORECASE
        )
    return pattern.match(in_str) is not None
+
+
def is_json(in_str: Any) -> bool:
    """
    Check if a string is a valid json.

    *Examples:*

    >>> is_json('{"name": "Peter"}') # returns true
    >>> is_json('[1, 2, 3]') # returns true
    >>> is_json('{nope}') # returns false
    """
    if not is_full_string(in_str):
        return False
    # Cheap shape check before paying for a full parse.
    if JSON_WRAPPER_RE.match(in_str) is None:
        return False
    try:
        decoded = json.loads(in_str)
    except (TypeError, ValueError, OverflowError):
        return False
    return isinstance(decoded, (dict, list))
+
+
def is_uuid(in_str: Any, allow_hex: bool = False) -> bool:
    """
    Check if a string is a valid UUID.

    *Example:*

    >>> is_uuid('6f8aa2f9-686c-4ac3-8766-5712354a04cf') # returns true
    >>> is_uuid('6f8aa2f9686c4ac387665712354a04cf') # returns false
    >>> is_uuid('6f8aa2f9686c4ac387665712354a04cf', allow_hex=True) # returns true
    """
    # string casting is used to allow UUID itself as input data type
    text = str(in_str)
    pattern = UUID_HEX_OK_RE if allow_hex else UUID_RE
    return pattern.match(text) is not None
+
+
def is_ip_v4(in_str: Any) -> bool:
    """
    Checks if a string is a valid ip v4.

    *Examples:*

    >>> is_ip_v4('255.200.100.75') # returns true
    >>> is_ip_v4('nope') # returns false (not an ip)
    >>> is_ip_v4('255.200.100.999') # returns false (999 is out of range)
    """
    if not is_full_string(in_str):
        return False
    if SHALLOW_IP_V4_RE.match(in_str) is None:
        return False
    # The shallow regex only checks the shape; verify each octet's range.
    return all(0 <= int(octet) <= 255 for octet in in_str.split("."))
+
+
def extract_ip_v4(in_str: Any) -> Optional[str]:
    """
    Extracts the IPv4 chunk of a string or None.
    """
    if not is_full_string(in_str):
        return None
    # Bug fix: str.strip() returns a new string; previously its result was
    # discarded, so leading whitespace made the match fail.
    in_str = in_str.strip()
    m = SHALLOW_IP_V4_RE.match(in_str)
    if m is not None:
        return m.group(0)
    return None
+
+
def is_ip_v6(in_str: Any) -> bool:
    """
    Checks if a string is a valid ip v6.

    *Examples:*

    >>> is_ip_v6('2001:db8:85a3:0000:0000:8a2e:370:7334') # returns true
    >>> is_ip_v6('2001:db8:85a3:0000:0000:8a2e:370:?') # returns false (invalid "?")
    """
    if not is_full_string(in_str):
        return False
    return IP_V6_RE.match(in_str) is not None
+
+
def extract_ip_v6(in_str: Any) -> Optional[str]:
    """
    Extract IPv6 chunk or None.
    """
    if not is_full_string(in_str):
        return None
    # Bug fix: str.strip() returns a new string; previously its result was
    # discarded, so leading whitespace made the match fail.
    in_str = in_str.strip()
    m = IP_V6_RE.match(in_str)
    if m is not None:
        return m.group(0)
    return None
+
+
def is_ip(in_str: Any) -> bool:
    """
    Checks if a string is a valid ip (either v4 or v6).

    *Examples:*

    >>> is_ip('255.200.100.75') # returns true
    >>> is_ip('2001:db8:85a3:0000:0000:8a2e:370:7334') # returns true
    >>> is_ip('1.2.3') # returns false
    """
    if is_ip_v6(in_str):
        return True
    return is_ip_v4(in_str)
+
+
def extract_ip(in_str: Any) -> Optional[str]:
    """Extract the IP address (v4 tried first, then v6) or None."""
    candidate = extract_ip_v4(in_str)
    if candidate is not None:
        return candidate
    return extract_ip_v6(in_str)
+
+
def is_mac_address(in_str: Any) -> bool:
    """Return True if in_str is a valid MAC address, False otherwise."""
    if not is_full_string(in_str):
        return False
    return MAC_ADDRESS_RE.match(in_str) is not None
+
+
def extract_mac_address(in_str: Any, *, separator: str = ":") -> Optional[str]:
    """Extract the MAC address from in_str, normalizing any ":"/"-"
    delimiters to `separator`.  Returns None if no MAC address is found.
    """
    if not is_full_string(in_str):
        return None
    # Bug fix: strip() and replace() return new strings; previously their
    # results were discarded, so leading whitespace broke matching and the
    # `separator` argument had no effect at all.
    in_str = in_str.strip()
    m = MAC_ADDRESS_RE.match(in_str)
    if m is None:
        return None
    mac = m.group(0)
    mac = mac.replace(":", separator)
    mac = mac.replace("-", separator)
    return mac
+
+
def is_slug(in_str: Any, separator: str = "-") -> bool:
    """
    Checks if a given string is a slug (as created by `slugify()`).

    *Examples:*

    >>> is_slug('my-blog-post-title') # returns true
    >>> is_slug('My blog post title') # returns false

    :param in_str: String to check.
    :type in_str: str
    :param separator: Join sign used by the slug.
    :type separator: str
    :return: True if slug, false otherwise.
    """
    if not is_full_string(in_str):
        return False
    sep = re.escape(separator)
    pattern = rf"^([a-z\d]+{sep}*?)*[a-z\d]$"
    return re.match(pattern, in_str) is not None
+
+
def contains_html(in_str: str) -> bool:
    """
    Checks if the given string contains HTML/XML tags.

    By design, this function matches ANY type of tag, so don't expect to use it
    as an HTML validator, its goal is to detect "malicious" or undesired tags in the text.

    *Examples:*

    >>> contains_html('my string is <strong>bold</strong>') # returns true
    >>> contains_html('my string is not bold') # returns false

    Raises ValueError if the input is not a string.
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    return bool(HTML_RE.search(in_str))
+
+
def words_count(in_str: str) -> int:
    """
    Returns the number of words contained into the given string.

    This method is smart, it does consider only sequence of one or more letter and/or numbers
    as "words", so a string like this: "! @ # % ... []" will return zero!
    Moreover it is aware of punctuation, so the count for a string like "one,two,three.stop"
    will be 4 not 1 (even if there are no spaces in the string).

    *Examples:*

    >>> words_count('hello world') # returns 2
    >>> words_count('one,two,three.stop') # returns 4

    Raises ValueError if the input is not a string.
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    matches = WORDS_COUNT_RE.findall(in_str)
    return len(matches)
+
+
def generate_uuid(as_hex: bool = False) -> str:
    """
    Generate a UUID string (using `uuid.uuid4()`).

    *Examples:*

    >>> generate_uuid() # possible output: '97e3a716-6b33-4ab9-9bb1-8128cb24d76b'
    >>> generate_uuid(as_hex=True) # possible output: '97e3a7166b334ab99bb18128cb24d76b'
    """
    uid = uuid4()
    return uid.hex if as_hex else str(uid)
+
+
def generate_random_alphanumeric_string(size: int) -> str:
    """
    Returns a string of the specified size containing random
    characters (uppercase/lowercase ascii letters and digits).

    *Example:*

    >>> generate_random_alphanumeric_string(9) # possible output: "cx3QQbzYg"

    Raises ValueError if size < 1.
    """
    if size < 1:
        raise ValueError("size must be >= 1")
    # Idiom: random.choices draws `size` characters (with replacement) in
    # one call; join builds the string without the intermediate list helper.
    chars = string.ascii_letters + string.digits
    return "".join(random.choices(chars, k=size))
+
+
def reverse(in_str: str) -> str:
    """Return a copy of in_str with its characters in reverse order.

    Raises ValueError if the input is not a string.
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    return "".join(reversed(in_str))
+
+
def camel_case_to_snake_case(in_str: str, *, separator: str = "_") -> str:
    """
    Convert a camel case string into a snake case one.
    (The original string is returned if is not a valid camel case string)

    Raises ValueError if the input is not a string.
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    if not is_camel_case(in_str):
        return in_str
    # Insert the separator before each uppercase boundary, then lowercase.
    return CAMEL_CASE_REPLACE_RE.sub(
        lambda m: m.group(1) + separator, in_str
    ).lower()
+
+
def snake_case_to_camel_case(
    in_str: str, *, upper_case_first: bool = True, separator: str = "_"
) -> str:
    """
    Convert a snake case string into a camel case one.
    (The original string is returned if is not a valid snake case string)

    Raises ValueError if the input is not a string.
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    if not is_snake_case(in_str, separator=separator):
        return in_str
    tokens = [
        token.title()
        for token in in_str.split(separator)
        if is_full_string(token)
    ]
    if not upper_case_first:
        tokens[0] = tokens[0].lower()
    return "".join(tokens)
+
+
def to_char_list(in_str: str) -> List[str]:
    """Explode a string into a list of its characters.

    Non-string input yields an empty list (no exception).
    """
    return list(in_str) if is_string(in_str) else []
+
+
def from_char_list(in_list: List[str]) -> str:
    """Join a list of (single character) strings back into one string."""
    joined = "".join(in_list)
    return joined
+
+
def shuffle(in_str: str) -> str:
    """Return a new string containing the same chars as the given one but
    in a randomized order.

    Raises ValueError if the input is not a string.
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    # Strings are immutable; shuffle a char list and reassemble.
    shuffled = list(in_str)
    random.shuffle(shuffled)
    return "".join(shuffled)
+
+
def strip_html(in_str: str, keep_tag_content: bool = False) -> str:
    """
    Remove html code contained into the given string.

    *Examples:*

    >>> strip_html('test: <a href="foo/bar">click here</a>') # returns 'test: '
    >>> strip_html('test: <a href="foo/bar">click here</a>', keep_tag_content=True) # returns 'test: click here'

    Raises ValueError if the input is not a string.
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    # HTML_TAG_ONLY_RE removes just the tags; HTML_RE removes tag + content.
    pattern = HTML_TAG_ONLY_RE if keep_tag_content else HTML_RE
    return pattern.sub("", in_str)
+
+
def asciify(in_str: str) -> str:
    """
    Force string content to be ascii-only by translating all non-ascii chars into the closest possible representation
    (eg: ó -> o, Ë -> E, ç -> c...).

    **Bear in mind**: Some chars may be lost if impossible to translate.

    *Example:*

    >>> asciify('èéùúòóäåëýñÅÀÁÇÌÍÑÓË') # returns 'eeuuooaaeynAAACIINOE'
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    # NFKD decomposes accented chars into base char + combining mark; the
    # combining marks are then dropped by the "ignore" ascii encode below.
    decomposed = unicodedata.normalize("NFKD", in_str)
    return decomposed.encode("ascii", "ignore").decode("utf-8")
+
+
def slugify(in_str: str, *, separator: str = "-") -> str:
    """
    Converts a string into a "slug" using provided separator.
    The returned string has the following properties:

    - it has no spaces
    - all letters are in lower case
    - all punctuation signs and non alphanumeric chars are removed
    - words are divided using provided separator
    - all chars are encoded as ascii (by using `asciify()`)
    - is safe for URL

    *Examples:*

    >>> slugify('Top 10 Reasons To Love Dogs!!!') # returns: 'top-10-reasons-to-love-dogs'
    >>> slugify('Mönstér Mägnët') # returns 'monster-magnet'
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    # Pipeline: non-alphanumerics -> spaces, spaces -> separator, collapse
    # runs of the separator, then force everything to ascii.
    slug = NO_LETTERS_OR_NUMBERS_RE.sub(" ", in_str.lower()).strip()
    slug = SPACES_RE.sub(separator, slug)
    slug = re.sub(re.escape(separator) + r"+", separator, slug)
    return asciify(slug)
+
+
def to_bool(in_str: str) -> bool:
    """
    Turns a string into a boolean based on its content (CASE INSENSITIVE).

    A positive boolean (True) is returned if the string value is one of the following:

    - "true"
    - "t"
    - "1"
    - "yes"
    - "y"

    Otherwise False is returned.

    Raises ValueError if the input is not a string.
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    return in_str.lower() in ("true", "1", "yes", "y", "t")
+
+
def dedent(in_str: str) -> str:
    """
    Removes tab indentation from multi line strings (inspired by analogous Scala function).

    *Example:*

    >>> dedent('''
    >>> line 1
    >>> line 2
    >>> line 3
    >>> ''')
    >>> # returns:
    >>> '''
    >>> line 1
    >>> line 2
    >>> line 3
    >>> '''

    Raises ValueError if the input is not a string.
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    # MARGIN_RE strips the leading margin from each individual line.
    line_separator = '\n'
    lines = [MARGIN_RE.sub('', line) for line in in_str.split(line_separator)]
    return line_separator.join(lines)
+
+
def indent(in_str: str, amount: int) -> str:
    """Prepend `amount` spaces to every line of in_str.

    Raises ValueError if the input is not a string.
    """
    if not is_string(in_str):
        raise ValueError(in_str)
    prefix = " " * amount
    return "\n".join(prefix + line for line in in_str.split("\n"))
+
+
def sprintf(*args, **kwargs) -> str:
    """Render args into a string the way print() would, honoring the
    optional `sep` (default " ") and `end` (default "\\n") keyword args.

    Raises TypeError if sep/end are not strings or if any other keyword
    argument is passed.
    """
    sep = kwargs.pop("sep", None)
    if sep is not None:
        if not isinstance(sep, str):
            raise TypeError("sep must be None or a string")
    end = kwargs.pop("end", None)
    if end is not None:
        if not isinstance(end, str):
            raise TypeError("end must be None or a string")
    if kwargs:
        # Bug fix: message previously referred to "sprint()".
        raise TypeError("invalid keyword arguments to sprintf()")
    if sep is None:
        sep = " "
    if end is None:
        end = "\n"
    # str() is a no-op for strings, so no need to special-case them;
    # sep.join avoids the quadratic string accumulation of the old loop.
    return sep.join(str(arg) for arg in args) + end
--- /dev/null
+#!/usr/bin/env python3
+
+import unittest
+
+from ansi import fg, bg, reset
+import string_utils as su
+
+
class TestStringUtils(unittest.TestCase):
    """Unit tests for the predicates and converters in string_utils."""

    def test_is_none_or_empty(self):
        self.assertTrue(su.is_none_or_empty(None))
        self.assertTrue(su.is_none_or_empty(""))
        self.assertTrue(su.is_none_or_empty("\n"))
        self.assertTrue(su.is_none_or_empty(' '))
        self.assertTrue(su.is_none_or_empty(' \n \r \t '))
        self.assertFalse(su.is_none_or_empty("Covfefe"))
        self.assertFalse(su.is_none_or_empty("1234"))

    def test_is_string(self):
        self.assertTrue(su.is_string("test"))
        self.assertTrue(su.is_string(""))
        self.assertFalse(su.is_string(bytes(0x1234)))
        self.assertFalse(su.is_string(1234))

    def test_is_empty_string(self):
        self.assertTrue(su.is_empty_string(''))
        self.assertTrue(su.is_empty_string(' \t\t \n \r '))
        self.assertFalse(su.is_empty_string(' this is a test '))
        self.assertFalse(su.is_empty_string(22))

    def test_is_full_string(self):
        self.assertFalse(su.is_full_string(''))
        self.assertFalse(su.is_full_string(' \t\t \n \r '))
        self.assertTrue(su.is_full_string(' this is a test '))
        self.assertFalse(su.is_full_string(22))

    def test_is_number(self):
        self.assertTrue(su.is_number("1234"))
        self.assertTrue(su.is_number("-1234"))
        self.assertTrue(su.is_number("1234.55"))
        self.assertTrue(su.is_number("-1234.55"))
        self.assertTrue(su.is_number("+1234"))
        self.assertTrue(su.is_number("+1234.55"))
        self.assertTrue(su.is_number("-0.8485996602e10"))
        self.assertTrue(su.is_number("-0.8485996602E10"))
        self.assertFalse(su.is_number("-0.8485996602t10"))
        # Surrounding whitespace is not tolerated.
        self.assertFalse(su.is_number(" 1234 "))
        self.assertFalse(su.is_number(" 1234"))
        self.assertFalse(su.is_number("1234 "))
        self.assertFalse(su.is_number("fifty"))

    def test_is_integer_number(self):
        self.assertTrue(su.is_integer_number("1234"))
        self.assertTrue(su.is_integer_number("-1234"))
        self.assertFalse(su.is_integer_number("1234.55"))
        self.assertFalse(su.is_integer_number("-1234.55"))
        self.assertTrue(su.is_integer_number("+1234"))
        self.assertTrue(su.is_integer_number("0x1234"))
        self.assertTrue(su.is_integer_number("0xdeadbeef"))
        self.assertFalse(su.is_integer_number("+1234.55"))
        # NOTE(review): this asserts is_octal_integer_number inside the
        # is_integer_number test (duplicated in test_is_octal_integer_number
        # below); looks misplaced -- confirm intent.
        self.assertTrue(su.is_octal_integer_number("+0o777"))
        self.assertFalse(su.is_integer_number("-0.8485996602e10"))
        self.assertFalse(su.is_integer_number("-0.8485996602E10"))
        self.assertFalse(su.is_integer_number("-0.8485996602t10"))
        self.assertFalse(su.is_integer_number(" 1234 "))
        self.assertFalse(su.is_integer_number(" 1234"))
        self.assertFalse(su.is_integer_number("1234 "))
        self.assertFalse(su.is_integer_number("fifty"))

    def test_is_hexidecimal_integer_number(self):
        self.assertTrue(su.is_hexidecimal_integer_number("0x1234"))
        self.assertTrue(su.is_hexidecimal_integer_number("0X1234"))
        self.assertTrue(su.is_hexidecimal_integer_number("0x1234D"))
        self.assertTrue(su.is_hexidecimal_integer_number("0xF1234"))
        self.assertTrue(su.is_hexidecimal_integer_number("0xe1234"))
        self.assertTrue(su.is_hexidecimal_integer_number("0x1234a"))
        self.assertTrue(su.is_hexidecimal_integer_number("0xdeadbeef"))
        self.assertTrue(su.is_hexidecimal_integer_number("-0xdeadbeef"))
        self.assertTrue(su.is_hexidecimal_integer_number("+0xdeadbeef"))
        self.assertFalse(su.is_hexidecimal_integer_number("0xH1234"))
        self.assertFalse(su.is_hexidecimal_integer_number("0x1234H"))
        self.assertFalse(su.is_hexidecimal_integer_number("nine"))

    def test_is_octal_integer_number(self):
        self.assertTrue(su.is_octal_integer_number("0o111"))
        self.assertTrue(su.is_octal_integer_number("0O111"))
        self.assertTrue(su.is_octal_integer_number("-0o111"))
        self.assertTrue(su.is_octal_integer_number("+0o777"))
        self.assertFalse(su.is_octal_integer_number("-+0o111"))
        self.assertFalse(su.is_octal_integer_number("0o181"))
        self.assertFalse(su.is_octal_integer_number("0o1a1"))
        self.assertFalse(su.is_octal_integer_number("one"))

    def test_is_binary_integer_number(self):
        self.assertTrue(su.is_binary_integer_number("0b10100101110"))
        self.assertTrue(su.is_binary_integer_number("+0b10100101110"))
        self.assertTrue(su.is_binary_integer_number("-0b10100101110"))
        self.assertTrue(su.is_binary_integer_number("0B10100101110"))
        self.assertTrue(su.is_binary_integer_number("+0B10100101110"))
        self.assertTrue(su.is_binary_integer_number("-0B10100101110"))
        self.assertFalse(su.is_binary_integer_number("-0B10100101110 "))
        self.assertFalse(su.is_binary_integer_number(" -0B10100101110"))
        self.assertFalse(su.is_binary_integer_number("-0B10100 101110"))
        self.assertFalse(su.is_binary_integer_number("0b10100201110"))
        self.assertFalse(su.is_binary_integer_number("0b10100101e110"))
        self.assertFalse(su.is_binary_integer_number("fred"))

    def test_to_int(self):
        self.assertEqual(su.to_int("1234"), 1234)
        self.assertEqual(su.to_int("0x1234"), 4660)
        self.assertEqual(su.to_int("0o777"), 511)
        self.assertEqual(su.to_int("0b111"), 7)

    def test_is_decimal_number(self):
        self.assertTrue(su.is_decimal_number('4.3'))
        self.assertTrue(su.is_decimal_number('.3'))
        self.assertTrue(su.is_decimal_number('0.3'))
        self.assertFalse(su.is_decimal_number('3.'))
        self.assertTrue(su.is_decimal_number('3.0'))
        self.assertTrue(su.is_decimal_number('3.0492949249e20'))
        self.assertFalse(su.is_decimal_number('3'))
        self.assertFalse(su.is_decimal_number('0x11'))

    def test_strip_escape_sequences(self):
        # Build a string containing real ANSI escapes and check that only
        # the escape sequences are removed.
        s = f' {fg("red")} this is a test {bg("white")} this is a test {reset()} '
        self.assertEqual(
            su.strip_escape_sequences(s),
            ' this is a test this is a test '
        )
        s = ' this is another test '
        self.assertEqual(su.strip_escape_sequences(s), s)

    def test_is_url(self):
        self.assertTrue(su.is_url("http://host.domain/uri/uri#shard?param=value+s"))
        self.assertTrue(su.is_url("http://127.0.0.1/uri/uri#shard?param=value+s"))
        self.assertTrue(su.is_url("http://user:pass@127.0.0.1:81/uri/uri#shard?param=value+s"))
        self.assertTrue(su.is_url("ftp://127.0.0.1/uri/uri"))
+
# Allow running this test file directly as well as via a test runner.
if __name__ == '__main__':
    unittest.main()
--- /dev/null
+#!/usr/bin/env python3
+
+"""Utilities for dealing with "text"."""
+
+import math
+import sys
+from typing import List, NamedTuple
+
+from ansi import fg, reset
+import exec_utils
+
+
class RowsColumns(NamedTuple):
    """Terminal dimensions: number of character rows and columns."""
    rows: int
    columns: int
+
+
def get_console_rows_columns() -> RowsColumns:
    """Return the current console size by shelling out to `stty size`."""
    size_output = exec_utils.cmd("stty size")
    rows, columns = size_output.split()
    return RowsColumns(rows=int(rows), columns=int(columns))
+
+
def progress_graph(
    current: int,
    total: int,
    *,
    width=70,
    fgcolor=fg("school bus yellow"),
    left_end="[",
    right_end="]",
    redraw=True,
) -> None:
    """Print (to stderr) a bar graph showing current/total completion.

    With redraw=True the line ends in a carriage return so the next call
    overwrites it in place; otherwise each call emits its own line.
    """
    line_ending = "\r" if redraw else "\n"
    bar = bar_graph(
        current / total,
        include_text=True,
        width=width,
        fgcolor=fgcolor,
        left_end=left_end,
        right_end=right_end,
    )
    print(bar, end=line_ending, flush=True, file=sys.stderr)
+
+
def bar_graph(
    percentage: float,
    *,
    include_text=True,
    width=70,
    fgcolor=fg("school bus yellow"),
    left_end="[",
    right_end="]",
) -> str:
    """Return a string depicting `percentage` as a colored horizontal bar.

    Annotation fixed: this function returns the bar string (it was
    declared -> None but always returns str).

    Raises ValueError unless 0.0 <= percentage <= 1.0.
    """
    if percentage < 0.0 or percentage > 1.0:
        raise ValueError(percentage)
    if include_text:
        text = f"{percentage*100.0:2.1f}%"
    else:
        text = ""
    whole_width = math.floor(percentage * width)
    if whole_width == width:
        # Completely full bar: give back one whole cell and use the widest
        # partial glyph so the total width stays constant.
        whole_width -= 1
        part_char = "▉"
    else:
        # Map the fractional remainder of a cell onto one of eight
        # partial-block glyphs.
        remainder_width = (percentage * width) % 1
        part_width = math.floor(remainder_width * 8)
        part_char = [" ", "▏", "▎", "▍", "▌", "▋", "▊", "▉"][part_width]
    return (
        left_end +
        fgcolor +
        "█" * whole_width + part_char +
        " " * (width - whole_width - 1) +
        reset() +
        right_end + " " +
        text)
+
+
def distribute_strings(
    strings: List[str],
    *,
    width: int = 80,
    alignment: str = "c",
    padding: str = " ",
) -> str:
    """Distribute `strings` across `width` columns by justifying each one
    into an equal-sized sub-column.

    Returns "" for an empty list (previously raised ZeroDivisionError).
    """
    # Robustness fix: guard against division by zero on an empty list.
    if not strings:
        return ""
    subwidth = math.floor(width / len(strings))
    retval = ""
    for string in strings:
        retval += justify_string(
            string, width=subwidth, alignment=alignment, padding=padding
        )
    return retval
+
+
def justify_string_by_chunk(
    string: str, width: int = 80, padding: str = " "
) -> str:
    """Fully justify `string` to `width`: first word flush left, last word
    flush right, the remaining words distributed between them.

    NOTE(review): assumes the string contains at least two words; a single
    word makes the starred unpack raise ValueError -- confirm callers.
    """
    # Only a single padding character is used.
    padding = padding[0]
    first, *rest, last = string.split()
    # Width remaining for the middle words (first/last word + one pad each).
    w = width - (len(first) + 1 + len(last) + 1)
    retval = (
        first + padding + distribute_strings(rest, width=w, padding=padding)
    )
    # Pad until the last word lands flush against the right edge.
    while len(retval) + len(last) < width:
        retval += padding
    retval += last
    return retval
+
+
def justify_string(
    string: str, *, width: int = 80, alignment: str = "c", padding: str = " "
) -> str:
    """Pad `string` out to `width` columns.

    alignment: 'l' (left), 'r' (right), 'c' (centered) or 'j' (fully
    justified via justify_string_by_chunk).  Only the first character of
    `alignment` and `padding` is used.  Raises ValueError for any other
    alignment.
    """
    align = alignment[0]
    pad = padding[0]
    while len(string) < width:
        if align == "j":
            return justify_string_by_chunk(string, width=width, padding=pad)
        if align == "l":
            string = string + pad
        elif align == "r":
            string = pad + string
        elif align == "c":
            # Alternate sides so the text ends up centered.
            string = string + pad if len(string) % 2 == 0 else pad + string
        else:
            raise ValueError
    return string
+
+
def justify_text(text: str, *, width: int = 80, alignment: str = "c") -> str:
    """Word-wrap `text` to `width` columns, justifying each full line with
    the given alignment (see justify_string).
    """
    # Bug fix: removed a stray debug print("-" * width) that wrote a ruler
    # line to stdout on every call.
    retval = ""
    line = ""
    for word in text.split():
        if len(line) + len(word) > width:
            line = line[1:]  # drop the leading space added below
            line = justify_string(line, width=width, alignment=alignment)
            retval = retval + "\n" + line
            line = ""
        line = line + " " + word
    # Flush the final (partial) line without justification.
    if len(line) > 0:
        retval += "\n" + line[1:]
    return retval[1:]
--- /dev/null
+#!/usr/bin/env python3
+
+import functools
+import logging
+import os
+import threading
+from typing import Callable, Optional, Tuple
+
+logger = logging.getLogger(__name__)
+
+
def current_thread_id() -> str:
    """Return a "ppid/pid/thread-name:" tag identifying the calling
    thread, suitable as a log-line prefix.
    """
    parent = os.getppid()
    me = os.getpid()
    thread_name = threading.current_thread().name
    return f'{parent}/{me}/{thread_name}:'
+
+
def is_current_thread_main_thread() -> bool:
    """Return True iff called from the interpreter's main thread."""
    current = threading.current_thread()
    return current is threading.main_thread()
+
+
def background_thread(
    _funct: Optional[Callable]
) -> Tuple[threading.Thread, threading.Event]:
    """A function decorator to create a background thread.

    *** Please note: the decorated function must take an shutdown ***
    *** event as an input parameter and should periodically check ***
    *** it and stop if the event is set. ***

    Usage:

        @background_thread
        def random(a: int, b: str, stop_event: threading.Event) -> None:
            while True:
                print(f"Hi there {b}: {a}!")
                time.sleep(10.0)
                if stop_event.is_set():
                    return


        def main() -> None:
            (thread, event) = random(22, "dude")
            print("back!")
            time.sleep(30.0)
            event.set()
            thread.join()

    Note: in addition to any other arguments the function has, it must
    take a stop_event as the last unnamed argument which it should
    periodically check. If the event is set, it means the thread has
    been requested to terminate ASAP.

    NOTE(review): the return annotation describes the decorated call's
    result; when _funct is None (parenthesized decorator usage) this
    actually returns the wrapper callable -- confirm.
    """
    def wrapper(funct: Callable):
        @functools.wraps(funct)
        def inner_wrapper(
            *a, **kwa
        ) -> Tuple[threading.Thread, threading.Event]:
            # Event the caller uses to request the thread's termination.
            should_terminate = threading.Event()
            should_terminate.clear()
            # The stop event is appended as the last positional argument.
            newargs = (*a, should_terminate)
            thread = threading.Thread(
                target=funct,
                args=newargs,
                kwargs=kwa,
            )
            thread.start()
            logger.debug(
                f'Started thread {thread.name} tid={thread.ident}'
            )
            return (thread, should_terminate)
        return inner_wrapper

    # Support both bare (@background_thread) and parenthesized usage.
    if _funct is None:
        return wrapper
    else:
        return wrapper(_funct)
+
+
def periodically_invoke(
    period_sec: float,
    stop_after: Optional[int],
) -> Callable:
    """
    Periodically invoke a decorated function. Stop after N invocations
    (or, if stop_after is None, call forever). Delay period_sec between
    invocations.

    Returns a Thread object and an Event that, when signaled, will stop
    the invocations. Note that it is possible to be invoked one time
    after the Event is set. This event can be used to stop infinite
    invocation style or finite invocation style decorations.

        @periodically_invoke(period_sec=0.5, stop_after=None)
        def there(name: str, age: int) -> None:
            print(f" ...there {name}, {age}")


        @periodically_invoke(period_sec=1.0, stop_after=3)
        def hello(name: str) -> None:
            print(f"Hello, {name}")

    """
    def decorator_repeat(func):
        # Runs inside the background thread: invoke func, then wait on the
        # stop event -- the wait doubles as the inter-invocation sleep.
        def helper_thread(should_terminate, *args, **kwargs) -> None:
            if stop_after is None:
                while True:
                    func(*args, **kwargs)
                    # wait() returns True iff the event was set -> stop.
                    res = should_terminate.wait(period_sec)
                    if res:
                        return
            else:
                for _ in range(stop_after):
                    func(*args, **kwargs)
                    res = should_terminate.wait(period_sec)
                    if res:
                        return
            return

        @functools.wraps(func)
        def wrapper_repeat(*args, **kwargs):
            should_terminate = threading.Event()
            should_terminate.clear()
            # The stop event is passed as the helper's first argument.
            newargs = (should_terminate, *args)
            thread = threading.Thread(
                target=helper_thread,
                args = newargs,
                kwargs = kwargs
            )
            thread.start()
            logger.debug(
                f'Started thread {thread.name} tid={thread.ident}'
            )
            return (thread, should_terminate)
        return wrapper_repeat
    return decorator_repeat
--- /dev/null
+#!/usr/bin/env python3
+
+import logging
+from typing import Any, Optional
+
+logger = logging.getLogger(__name__)
+
+
def unwrap_optional(x: Optional[Any]) -> Any:
    """Assert that an Optional value is present and return it.

    Raises AssertionError (after logging critically) if x is None.
    """
    if x is not None:
        return x
    msg = 'Argument to unwrap_optional was unexpectedly None'
    logger.critical(msg)
    raise AssertionError(msg)