From d925b691c451e6581e81abd7282ea9496d96c609 Mon Sep 17 00:00:00 2001
From: Scott Gasch
Date: Tue, 18 Oct 2022 22:22:32 -0700
Subject: [PATCH] Add fff.py as an example but it's really a devtool that I
 used in the pre-commit hook.

---
 examples/fff/fff.py | 61 +++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 61 insertions(+)
 create mode 100755 examples/fff/fff.py

diff --git a/examples/fff/fff.py b/examples/fff/fff.py
new file mode 100755
index 0000000..332f826
--- /dev/null
+++ b/examples/fff/fff.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python3
+
+"""
+This isn't really an example of pyutils but rather a development
+tool I used as part of a git pre-commit hook...
+
+f)ind f)ucked f)-strings
+
+Searches python files for suspicious looking strings that seem to
+use f-string {interpolations} without actually being f-strings.
+
+Usage: fff.py *.py
+"""
+
+import re
+import sys
+import tokenize
+from pathlib import Path
+from token import STRING
+from typing import List, Tuple
+
+from pyutils import ansi
+
+curly = re.compile(r'\{[^\}]+\}')
+
+
+def looks_suspicious(q: str, previous_tokens: List[Tuple[int, str]]) -> bool:
+    for pair in previous_tokens:
+        if ':' in pair[1]:
+            return False
+    return q[0] != 'f' and curly.search(q) is not None
+
+
+for filename in sys.argv[1:]:
+    path = Path(filename)
+    if path.suffix != ".py":
+        print(f"{filename} doesn't look like python; skipping.", file=sys.stderr)
+        continue
+    with tokenize.open(filename) as f:
+        previous_tokens = []
+        for token in tokenize.generate_tokens(f.readline):
+            (ttype, text, (start_row, _), (end_row, _), _) = token
+            if ttype == STRING:
+                if (
+                    'r"' not in text
+                    and "r'" not in text
+                    and looks_suspicious(text, previous_tokens)
+                ):
+                    print(
+                        f"{ansi.fg('green')}{filename}:{start_row}-{end_row}>{ansi.reset()}"
+                    )
+                    for (n, line) in enumerate(text.split('\n')):
+                        print(
+                            f'{ansi.fg("dark gray")}{start_row+n}:{ansi.reset()} {line}'
+                        )
+                    # print('Previous context:')
+                    # for pair in previous_tokens:
+                    #     print(f'{pair[0]} ({pair[1]})', end=' + ')
+                    # print()
+            previous_tokens.append((ttype, text))
+            previous_tokens = previous_tokens[-3:]
--
2.45.2
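
For context, here is a hypothetical input file (suspect.py, invented for
this note and not part of the patch) showing the class of bug fff.py
reports: a string that uses {brace} interpolation but is missing its f
prefix.

    # suspect.py - hypothetical input for fff.py
    name = "world"
    greeting = "hello, {name}"   # flagged; braces but no f prefix, prints literally
    ok = f"hello, {name}"        # not flagged; a real f-string
    raw = r"\{\d+\}"             # not flagged; raw strings are skipped

Running ./fff.py suspect.py would print the greeting string's location and
text, per the code above. Note that a string whose last few tokens of
preceding context contain a ':' is also skipped (see looks_suspicious),
presumably to suppress false positives from format specs and dict-like
contexts where literal braces are expected.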