"""A binary search tree implementation."""
-from abc import abstractmethod
-from typing import Any, Generator, List, Optional, Protocol
+from typing import Generator, List, Optional
-
-class Comparable(Protocol):
- """Anything that implements basic comparison methods such that it
- can be compared to other instances of the same type.
-
- Check out :meth:`functools.total_ordering`
- (https://docs.python.org/3/library/functools.html#functools.total_ordering)
- for an easy way to make your type comparable.
- """
-
- @abstractmethod
- def __lt__(self, other: Any) -> bool:
- ...
-
- @abstractmethod
- def __le__(self, other: Any) -> bool:
- ...
-
- @abstractmethod
- def __eq__(self, other: Any) -> bool:
- ...
+from pyutils.typez.typing import Comparable
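# Illustrative sketch (assumption: the imported Comparable protocol, like the
# one removed above, requires __lt__, __le__, and __eq__). As the removed
# docstring suggested, functools.total_ordering can derive the missing
# comparison methods for a hypothetical Version key type:
#
#     >>> import functools
#     >>> @functools.total_ordering
#     ... class Version:
#     ...     def __init__(self, major: int, minor: int) -> None:
#     ...         self.major, self.minor = major, minor
#     ...     def __eq__(self, other) -> bool:
#     ...         return (self.major, self.minor) == (other.major, other.minor)
#     ...     def __lt__(self, other) -> bool:
#     ...         return (self.major, self.minor) < (other.major, other.minor)
#     >>> Version(1, 2) <= Version(1, 3)
#     True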
class Node:
"""This module contains helper functions for dealing with Python dictionaries."""
from itertools import islice
-from typing import Any, Callable, Dict, Iterator, List, Tuple
+from typing import Any, Callable, Dict, Hashable, Iterator, List, Tuple
from pyutils import dataclass_utils
+from pyutils.typez.typing import Comparable
+
+AnyDict = Dict[Hashable, Any]
def init_or_inc(
- d: Dict[Any, Any],
- key: Any,
+ d: AnyDict,
+ key: Hashable,
*,
init_value: Any = 1,
inc_function: Callable[..., Any] = lambda x: x + 1,
return False
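# Illustrative usage sketch (assumed semantics, based on the visible signature
# and final return: a missing key is set to init_value and the call returns
# False; an existing key is passed through inc_function and the call returns True):
#
#     >>> counts: AnyDict = {}
#     >>> init_or_inc(counts, 'a')
#     False
#     >>> init_or_inc(counts, 'a')
#     True
#     >>> counts
#     {'a': 2}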
-def shard(d: Dict[Any, Any], size: int) -> Iterator[Dict[Any, Any]]:
+def shard(d: AnyDict, size: int) -> Iterator[AnyDict]:
"""
Shards (i.e. splits) a dict into N subdicts which, together,
contain all keys/values from the original unsharded dict.
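# Illustrative usage sketch (assumption: `size` is the maximum number of keys
# per shard and shards preserve insertion order):
#
#     >>> d = {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5}
#     >>> [len(piece) for piece in shard(d, 2)]
#     [2, 2, 1]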
def coalesce(
- inputs: Iterator[Dict[Any, Any]],
+ inputs: Iterator[AnyDict],
*,
aggregation_function: Callable[[Any, Any, Any], Any] = coalesce_by_creating_list,
-) -> Dict[Any, Any]:
+) -> AnyDict:
"""Coalesce (i.e. combine) N input dicts into one output dict
containing the union of all keys / values in every input dict.
When keys collide, apply the aggregation_function which, by
...
Exception: Key b is duplicated in more than one input dict.
"""
- out: Dict[Any, Any] = {}
+ out: AnyDict = {}
for d in inputs:
for key in d:
if key in out:
return out
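# Illustrative usage sketch (assumed default behavior: with
# coalesce_by_creating_list, values for keys that collide are gathered into a
# list while non-colliding keys keep their single values; list ordering is an
# implementation detail):
#
#     merged = coalesce([{'a': 1, 'b': 2}, {'b': 3, 'c': 4}])
#     # merged['a'] == 1, merged['c'] == 4, and merged['b'] is a list holding 2 and 3.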
-def item_with_max_value(d: Dict[Any, Any]) -> Tuple[Any, Any]:
+def item_with_max_value(d: AnyDict) -> Tuple[Hashable, Any]:
"""
Args:
d: a dict with comparable values
return max(d.items(), key=lambda _: _[1])
-def item_with_min_value(d: Dict[Any, Any]) -> Tuple[Any, Any]:
+def item_with_min_value(d: AnyDict) -> Tuple[Hashable, Any]:
"""
Args:
d: a dict with comparable values
return min(d.items(), key=lambda _: _[1])
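# Usage sketch for the item helpers: keys can be any Hashable, but the values
# must be mutually comparable.
#
#     >>> prices = {'apple': 3, 'banana': 1, 'cherry': 7}
#     >>> item_with_max_value(prices)
#     ('cherry', 7)
#     >>> item_with_min_value(prices)
#     ('banana', 1)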
-def key_with_max_value(d: Dict[Any, Any]) -> Any:
+def key_with_max_value(d: AnyDict) -> Hashable:
"""
Args:
d: a dict with comparable keys
return item_with_max_value(d)[0]
-def key_with_min_value(d: Dict[Any, Any]) -> Any:
+def key_with_min_value(d: AnyDict) -> Hashable:
"""
Args:
d: a dict with comparable keys
return item_with_min_value(d)[0]
-def max_value(d: Dict[Any, Any]) -> Any:
+def max_value(d: AnyDict) -> Any:
"""
Args:
d: a dict with comparable values
return item_with_max_value(d)[1]
-def min_value(d: Dict[Any, Any]) -> Any:
+def min_value(d: AnyDict) -> Any:
"""
Args:
d: a dict with comparable values
return item_with_min_value(d)[1]
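# Usage sketch for the thin accessors built on item_with_max_value /
# item_with_min_value:
#
#     >>> prices = {'apple': 3, 'banana': 1, 'cherry': 7}
#     >>> key_with_max_value(prices), key_with_min_value(prices)
#     ('cherry', 'banana')
#     >>> max_value(prices), min_value(prices)
#     (7, 1)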
-def max_key(d: Dict[Any, Any]) -> Any:
+def max_key(d: Dict[Comparable, Any]) -> Comparable:
"""
Args:
d: a dict with comparable keys
return max(d.keys())
-def min_key(d: Dict[Any, Any]) -> Any:
+def min_key(d: Dict[Comparable, Any]) -> Comparable:
"""
Args:
d: a dict with comparable keys
return min(d.keys())
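# Usage sketch: unlike key_with_max_value / key_with_min_value, max_key and
# min_key compare the keys themselves, hence the Comparable bound.
#
#     >>> scores = {3: 'three', 1: 'one', 7: 'seven'}
#     >>> max_key(scores), min_key(scores)
#     (7, 1)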
-def parallel_lists_to_dict(keys: List[Any], values: List[Any]) -> Dict[Any, Any]:
+def parallel_lists_to_dict(keys: List[Hashable], values: List[Any]) -> AnyDict:
"""Given two parallel lists (keys and values), create and return
a dict.
return dict(zip(keys, values))
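# Usage sketch (assumes both lists have the same length):
#
#     >>> parallel_lists_to_dict(['a', 'b', 'c'], [1, 2, 3])
#     {'a': 1, 'b': 2, 'c': 3}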
-def dict_to_key_value_lists(d: Dict[Any, Any]) -> Tuple[List[Any], List[Any]]:
+def dict_to_key_value_lists(d: AnyDict) -> Tuple[List[Hashable], List[Any]]:
"""Given a dict, decompose it into a list of keys and values.
Args: