import abc
from abc import abstractmethod, abstractproperty
import collections
import contextlib
import functools
import re as stdlib_re  # Avoid confusion with the re we export.
import sys
import types
try:
    import collections.abc as collections_abc
except ImportError:
    import collections as collections_abc  # Fallback for PY3.2.
try:
    from types import SlotWrapperType, MethodWrapperType, MethodDescriptorType
except ImportError:
    SlotWrapperType = type(object.__init__)
    MethodWrapperType = type(object().__str__)
    MethodDescriptorType = type(str.join)


# Please keep __all__ alphabetized within each category.
__all__ = [
    # Super-special typing primitives.
    'Any',
    'Callable',
    'ClassVar',
    'Generic',
    'Optional',
    'Tuple',
    'Type',
    'TypeVar',
    'Union',

    # ABCs (from collections.abc).
    'AbstractSet',  # collections.abc.Set.
    'GenericMeta',  # subclass of abc.ABCMeta and a metaclass
                    # for 'Generic' and ABCs below.
    'ByteString',
    'Container',
    'Hashable',
    'ItemsView',
    'Iterable',
    'Iterator',
    'KeysView',
    'Mapping',
    'MappingView',
    'MutableMapping',
    'MutableSequence',
    'MutableSet',
    'Sequence',
    'Sized',
    'ValuesView',
    # The following are added depending on presence
    # of their non-generic counterparts in stdlib:
    # Awaitable,
    # AsyncIterator,
    # AsyncIterable,
    # Coroutine,
    # Collection,
    # ContextManager,
    # AsyncGenerator,

    # Structural checks, a.k.a. protocols.
    'Reversible',
    'SupportsAbs',
    'SupportsFloat',
    'SupportsInt',
    'SupportsRound',

    # Concrete collection types.
    'Counter',
    'Deque',
    'Dict',
    'DefaultDict',
    'List',
    'Set',
    'FrozenSet',
    'NamedTuple',  # Not really a type.
    'Generator',

    # One-off things.
    'AnyStr',
    'cast',
    'get_type_hints',
    'NewType',
    'no_type_check',
    'no_type_check_decorator',
    'overload',
    'Text',
    'TYPE_CHECKING',
]

# The pseudo-submodules 're' and 'io' are part of the public
# namespace, but excluded from __all__ because they might stomp on
# legitimate imports of those modules.


def _qualname(x):
    if sys.version_info[:2] >= (3, 3):
        return x.__qualname__
    else:
        # Fall back to just name.
        return x.__name__


def _trim_name(nm):
    whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase')
    if nm.startswith('_') and nm not in whitelist:
        nm = nm[1:]
    return nm


class TypingMeta(type):
    """Metaclass for most types defined in typing module
    (not a part of public API).

    This overrides __new__() to require an extra keyword parameter
    '_root', which serves as a guard against naive subclassing of the
    typing classes.  Any legitimate class defined using a metaclass
    derived from TypingMeta must pass _root=True.

    This also defines a dummy constructor (all the work for most typing
    constructs is done in __new__) and a nicer repr().
    """

    _is_protocol = False

    def __new__(cls, name, bases, namespace, *, _root=False):
        if not _root:
            raise TypeError("Cannot subclass %s" %
                            (', '.join(map(_type_repr, bases)) or '()'))
        return super().__new__(cls, name, bases, namespace)

    def __init__(self, *args, **kwds):
        pass

    def _eval_type(self, globalns, localns):
        """Override this in subclasses to interpret forward references.
139 140 For example, List['C'] is internally stored as 141 List[_ForwardRef('C')], which should evaluate to List[C], 142 where C is an object found in globalns or localns (searching 143 localns first, of course). 144 """ 145 return self 146 147 def _get_type_vars(self, tvars): 148 pass 149 150 def __repr__(self): 151 qname = _trim_name(_qualname(self)) 152 return '%s.%s' % (self.__module__, qname) 153 154 155 class _TypingBase(metaclass=TypingMeta, _root=True): 156 """Internal indicator of special typing constructs.""" 157 158 __slots__ = ('__weakref__',) 159 160 def __init__(self, *args, **kwds): 161 pass 162 163 def __new__(cls, *args, **kwds): 164 """Constructor. 165 166 This only exists to give a better error message in case 167 someone tries to subclass a special typing object (not a good idea). 168 """ 169 if (len(args) == 3 and 170 isinstance(args[0], str) and 171 isinstance(args[1], tuple)): 172 # Close enough. 173 raise TypeError("Cannot subclass %r" % cls) 174 return super().__new__(cls) 175 176 # Things that are not classes also need these. 177 def _eval_type(self, globalns, localns): 178 return self 179 180 def _get_type_vars(self, tvars): 181 pass 182 183 def __repr__(self): 184 cls = type(self) 185 qname = _trim_name(_qualname(cls)) 186 return '%s.%s' % (cls.__module__, qname) 187 188 def __call__(self, *args, **kwds): 189 raise TypeError("Cannot instantiate %r" % type(self)) 190 191 192 class _FinalTypingBase(_TypingBase, _root=True): 193 """Internal mix-in class to prevent instantiation. 194 195 Prevents instantiation unless _root=True is given in class call. 196 It is used to create pseudo-singleton instances Any, Union, Optional, etc. 197 """ 198 199 __slots__ = () 200 201 def __new__(cls, *args, _root=False, **kwds): 202 self = super().__new__(cls, *args, **kwds) 203 if _root is True: 204 return self 205 raise TypeError("Cannot instantiate %r" % cls) 206 207 def __reduce__(self): 208 return _trim_name(type(self).__name__) 209 210 211 class _ForwardRef(_TypingBase, _root=True): 212 """Internal wrapper to hold a forward reference.""" 213 214 __slots__ = ('__forward_arg__', '__forward_code__', 215 '__forward_evaluated__', '__forward_value__') 216 217 def __init__(self, arg): 218 super().__init__(arg) 219 if not isinstance(arg, str): 220 raise TypeError('Forward reference must be a string -- got %r' % (arg,)) 221 try: 222 code = compile(arg, '<string>', 'eval') 223 except SyntaxError: 224 raise SyntaxError('Forward reference must be an expression -- got %r' % 225 (arg,)) 226 self.__forward_arg__ = arg 227 self.__forward_code__ = code 228 self.__forward_evaluated__ = False 229 self.__forward_value__ = None 230 231 def _eval_type(self, globalns, localns): 232 if not self.__forward_evaluated__ or localns is not globalns: 233 if globalns is None and localns is None: 234 globalns = localns = {} 235 elif globalns is None: 236 globalns = localns 237 elif localns is None: 238 localns = globalns 239 self.__forward_value__ = _type_check( 240 eval(self.__forward_code__, globalns, localns), 241 "Forward references must evaluate to types.") 242 self.__forward_evaluated__ = True 243 return self.__forward_value__ 244 245 def __eq__(self, other): 246 if not isinstance(other, _ForwardRef): 247 return NotImplemented 248 return (self.__forward_arg__ == other.__forward_arg__ and 249 self.__forward_value__ == other.__forward_value__) 250 251 def __hash__(self): 252 return hash((self.__forward_arg__, self.__forward_value__)) 253 254 def __instancecheck__(self, obj): 255 raise TypeError("Forward 
references cannot be used with isinstance().")

    def __subclasscheck__(self, cls):
        raise TypeError("Forward references cannot be used with issubclass().")

    def __repr__(self):
        return '_ForwardRef(%r)' % (self.__forward_arg__,)


class _TypeAlias(_TypingBase, _root=True):
    """Internal helper class for defining generic variants of concrete types.

    Note that this is not a type; let's call it a pseudo-type.  It cannot
    be used in instance and subclass checks in parameterized form, i.e.
    ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
    ``False``.
    """

    __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')

    def __init__(self, name, type_var, impl_type, type_checker):
        """Initializer.

        Args:
            name: The name, e.g. 'Pattern'.
            type_var: The type parameter, e.g. AnyStr, or the
                specific type, e.g. str.
            impl_type: The implementation type.
            type_checker: Function that takes an impl_type instance
                and returns a value that should be a type_var instance.
        """
        assert isinstance(name, str), repr(name)
        assert isinstance(impl_type, type), repr(impl_type)
        assert not isinstance(impl_type, TypingMeta), repr(impl_type)
        assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
        self.name = name
        self.type_var = type_var
        self.impl_type = impl_type
        self.type_checker = type_checker

    def __repr__(self):
        return "%s[%s]" % (self.name, _type_repr(self.type_var))

    def __getitem__(self, parameter):
        if not isinstance(self.type_var, TypeVar):
            raise TypeError("%s cannot be further parameterized." % self)
        if self.type_var.__constraints__ and isinstance(parameter, type):
            if not issubclass(parameter, self.type_var.__constraints__):
                raise TypeError("%s is not a valid substitution for %s." %
                                (parameter, self.type_var))
        if isinstance(parameter, TypeVar) and parameter is not self.type_var:
            raise TypeError("%s cannot be re-parameterized." % self)
        return self.__class__(self.name, parameter,
                              self.impl_type, self.type_checker)

    def __eq__(self, other):
        if not isinstance(other, _TypeAlias):
            return NotImplemented
        return self.name == other.name and self.type_var == other.type_var

    def __hash__(self):
        return hash((self.name, self.type_var))

    def __instancecheck__(self, obj):
        if not isinstance(self.type_var, TypeVar):
            raise TypeError("Parameterized type aliases cannot be used "
                            "with isinstance().")
        return isinstance(obj, self.impl_type)

    def __subclasscheck__(self, cls):
        if not isinstance(self.type_var, TypeVar):
            raise TypeError("Parameterized type aliases cannot be used "
                            "with issubclass().")
        return issubclass(cls, self.impl_type)


def _get_type_vars(types, tvars):
    for t in types:
        if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
            t._get_type_vars(tvars)


def _type_vars(types):
    tvars = []
    _get_type_vars(types, tvars)
    return tuple(tvars)


def _eval_type(t, globalns, localns):
    if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
        return t._eval_type(globalns, localns)
    return t


def _type_check(arg, msg):
    """Check that the argument is a type, and return it (internal helper).

    As a special case, accept None and return type(None) instead.
    Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.

    The msg argument is a human-readable error message, e.g.

        "Union[arg, ...]: arg should be a type."

    We append the repr() of the actual value (truncated to 100 chars).
    """
    if arg is None:
        return type(None)
    if isinstance(arg, str):
        arg = _ForwardRef(arg)
    if (
        isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or
        not isinstance(arg, (type, _TypingBase)) and not callable(arg)
    ):
        raise TypeError(msg + " Got %.100r." % (arg,))
    # Bare Union etc. are not valid as type arguments
    if (
        type(arg).__name__ in ('_Union', '_Optional') and
        not getattr(arg, '__origin__', None) or
        isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol)
    ):
        raise TypeError("Plain %s is not valid as type argument" % arg)
    return arg


def _type_repr(obj):
    """Return the repr() of an object, special-casing types (internal helper).

    If obj is a type, we return a shorter version than the default
    type.__repr__, based on the module and qualified name, which is
    typically enough to uniquely identify a type.  For everything
    else, we fall back on repr(obj).
    """
    if isinstance(obj, type) and not isinstance(obj, TypingMeta):
        if obj.__module__ == 'builtins':
            return _qualname(obj)
        return '%s.%s' % (obj.__module__, _qualname(obj))
    if obj is ...:
        return '...'
    if isinstance(obj, types.FunctionType):
        return obj.__name__
    return repr(obj)


class _Any(_FinalTypingBase, _root=True):
    """Special type indicating an unconstrained type.

    - Any is compatible with every type.
    - Any is assumed to have all methods.
    - All values are assumed to be instances of Any.

    Note that all the above statements are true from the point of view of
    static type checkers.  At runtime, Any should not be used with instance
    or class checks.
    """

    __slots__ = ()

    def __instancecheck__(self, obj):
        raise TypeError("Any cannot be used with isinstance().")

    def __subclasscheck__(self, cls):
        raise TypeError("Any cannot be used with issubclass().")


Any = _Any(_root=True)


class TypeVar(_TypingBase, _root=True):
    """Type variable.

    Usage::

      T = TypeVar('T')  # Can be anything
      A = TypeVar('A', str, bytes)  # Must be str or bytes

    Type variables exist primarily for the benefit of static type
    checkers.  They serve as the parameters for generic types as well
    as for generic function definitions.  See class Generic for more
    information on generic types.  Generic functions work as follows:

      def repeat(x: T, n: int) -> List[T]:
          '''Return a list containing n references to x.'''
          return [x]*n

      def longest(x: A, y: A) -> A:
          '''Return the longest of two strings.'''
          return x if len(x) >= len(y) else y

    The latter example's signature is essentially the overloading
    of (str, str) -> str and (bytes, bytes) -> bytes.  Also note
    that if the arguments are instances of some subclass of str,
    the return type is still plain str.

    At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.

    Type variables defined with covariant=True or contravariant=True
    can be used to declare covariant or contravariant generic types.
    See PEP 484 for more details.  By default generic types are invariant
    in all type variables.

    Type variables can be introspected.
    e.g.:

      T.__name__ == 'T'
      T.__constraints__ == ()
      T.__covariant__ == False
      T.__contravariant__ == False
      A.__constraints__ == (str, bytes)
    """

    __slots__ = ('__name__', '__bound__', '__constraints__',
                 '__covariant__', '__contravariant__')

    def __init__(self, name, *constraints, bound=None,
                 covariant=False, contravariant=False):
        super().__init__(name, *constraints, bound=bound,
                         covariant=covariant, contravariant=contravariant)
        self.__name__ = name
        if covariant and contravariant:
            raise ValueError("Bivariant types are not supported.")
        self.__covariant__ = bool(covariant)
        self.__contravariant__ = bool(contravariant)
        if constraints and bound is not None:
            raise TypeError("Constraints cannot be combined with bound=...")
        if constraints and len(constraints) == 1:
            raise TypeError("A single constraint is not allowed")
        msg = "TypeVar(name, constraint, ...): constraints must be types."
        self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
        if bound:
            self.__bound__ = _type_check(bound, "Bound must be a type.")
        else:
            self.__bound__ = None

    def _get_type_vars(self, tvars):
        if self not in tvars:
            tvars.append(self)

    def __repr__(self):
        if self.__covariant__:
            prefix = '+'
        elif self.__contravariant__:
            prefix = '-'
        else:
            prefix = '~'
        return prefix + self.__name__

    def __instancecheck__(self, instance):
        raise TypeError("Type variables cannot be used with isinstance().")

    def __subclasscheck__(self, cls):
        raise TypeError("Type variables cannot be used with issubclass().")


# Some unconstrained type variables.  These are used by the container types.
# (These are not for export.)
T = TypeVar('T')  # Any type.
KT = TypeVar('KT')  # Key type.
VT = TypeVar('VT')  # Value type.
T_co = TypeVar('T_co', covariant=True)  # Any type covariant containers.
V_co = TypeVar('V_co', covariant=True)  # Any type covariant containers.
VT_co = TypeVar('VT_co', covariant=True)  # Value type covariant containers.
T_contra = TypeVar('T_contra', contravariant=True)  # Ditto contravariant.

# A useful type variable with constraints.  This represents string types.
# (This one *is* for export!)
AnyStr = TypeVar('AnyStr', bytes, str)


def _replace_arg(arg, tvars, args):
    """An internal helper function: replace arg if it is a type variable
    found in tvars with corresponding substitution from args or
    with corresponding substitution sub-tree if arg is a generic type.
    """

    if tvars is None:
        tvars = []
    if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)):
        return arg._subs_tree(tvars, args)
    if isinstance(arg, TypeVar):
        for i, tvar in enumerate(tvars):
            if arg == tvar:
                return args[i]
    return arg


# Special typing constructs Union, Optional, Generic, Callable and Tuple
# use three special attributes for internal bookkeeping of generic types:
# * __parameters__ is a tuple of unique free type parameters of a generic
#   type, for example, Dict[T, T].__parameters__ == (T,);
# * __origin__ keeps a reference to a type that was subscripted,
#   e.g., Union[T, int].__origin__ == Union;
# * __args__ is a tuple of all arguments used in subscripting,
#   e.g., Dict[T, int].__args__ == (T, int).
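

# Editorial sketch (not part of the typing API): _demo_generic_bookkeeping is
# a hypothetical helper added only to illustrate the three bookkeeping
# attributes described in the comment above.  It refers to Dict and Union,
# which are defined further down in this module, so it is only safe to call
# once the module has fully loaded; nothing here runs at import time.
def _demo_generic_bookkeeping():
    subscripted = Dict[T, int]
    # The unique free type variables of the subscripted type.
    assert subscripted.__parameters__ == (T,)
    # A reference back to the generic type that was subscripted.
    assert subscripted.__origin__ is Dict
    # All arguments used in the subscription, in order.
    assert subscripted.__args__ == (T, int)
    # Union keeps the same bookkeeping for its arguments.
    assert Union[T, int].__origin__ is Union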


def _subs_tree(cls, tvars=None, args=None):
    """An internal helper function: calculate substitution tree
    for generic cls after replacing its type parameters with
    substitutions in tvars -> args (if any).
    Repeat the same following __origin__'s.

    Return a list of arguments with all possible substitutions
    performed.  Arguments that are generic classes themselves are represented
    as tuples (so that no new classes are created by this function).
    For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)]
    """

    if cls.__origin__ is None:
        return cls
    # Make a chain of origins (i.e. cls -> cls.__origin__).
    current = cls.__origin__
    orig_chain = []
    while current.__origin__ is not None:
        orig_chain.append(current)
        current = current.__origin__
    # Replace type variables in __args__ if asked ...
    tree_args = []
    for arg in cls.__args__:
        tree_args.append(_replace_arg(arg, tvars, args))
    # ... then continue replacing down the origin chain.
    for ocls in orig_chain:
        new_tree_args = []
        for arg in ocls.__args__:
            new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args))
        tree_args = new_tree_args
    return tree_args


def _remove_dups_flatten(parameters):
    """An internal helper for Union creation and substitution: flatten Unions
    among parameters, then remove duplicates and strict subclasses.
    """

    # Flatten out Union[Union[...], ...].
    params = []
    for p in parameters:
        if isinstance(p, _Union) and p.__origin__ is Union:
            params.extend(p.__args__)
        elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
            params.extend(p[1:])
        else:
            params.append(p)
    # Weed out strict duplicates, preserving the first of each occurrence.
    all_params = set(params)
    if len(all_params) < len(params):
        new_params = []
        for t in params:
            if t in all_params:
                new_params.append(t)
                all_params.remove(t)
        params = new_params
        assert not all_params, all_params
    # Weed out subclasses.
    # E.g. Union[int, Employee, Manager] == Union[int, Employee].
    # If object is present it will be the sole survivor among proper classes.
    # Never discard type variables.
    # (In particular, Union[str, AnyStr] != AnyStr.)
    all_params = set(params)
    for t1 in params:
        if not isinstance(t1, type):
            continue
        if any(isinstance(t2, type) and issubclass(t1, t2)
               for t2 in all_params - {t1}
               if not (isinstance(t2, GenericMeta) and
                       t2.__origin__ is not None)):
            all_params.remove(t1)
    return tuple(t for t in params if t in all_params)


def _check_generic(cls, parameters):
    # Check correct count for parameters of a generic cls (internal helper).
    if not cls.__parameters__:
        raise TypeError("%s is not a generic class" % repr(cls))
    alen = len(parameters)
    elen = len(cls.__parameters__)
    if alen != elen:
        raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
                        ("many" if alen > elen else "few", repr(cls), alen, elen))


_cleanups = []


def _tp_cache(func):
    """Internal wrapper caching __getitem__ of generic types with a fallback to
    original function for non-hashable arguments.
641 """ 642 643 cached = functools.lru_cache()(func) 644 _cleanups.append(cached.cache_clear) 645 646 @functools.wraps(func) 647 def inner(*args, **kwds): 648 try: 649 return cached(*args, **kwds) 650 except TypeError: 651 pass # All real errors (not unhashable args) are raised below. 652 return func(*args, **kwds) 653 return inner 654 655 656 class _Union(_FinalTypingBase, _root=True): 657 """Union type; Union[X, Y] means either X or Y. 658 659 To define a union, use e.g. Union[int, str]. Details: 660 661 - The arguments must be types and there must be at least one. 662 663 - None as an argument is a special case and is replaced by 664 type(None). 665 666 - Unions of unions are flattened, e.g.:: 667 668 Union[Union[int, str], float] == Union[int, str, float] 669 670 - Unions of a single argument vanish, e.g.:: 671 672 Union[int] == int # The constructor actually returns int 673 674 - Redundant arguments are skipped, e.g.:: 675 676 Union[int, str, int] == Union[int, str] 677 678 - When comparing unions, the argument order is ignored, e.g.:: 679 680 Union[int, str] == Union[str, int] 681 682 - When two arguments have a subclass relationship, the least 683 derived argument is kept, e.g.:: 684 685 class Employee: pass 686 class Manager(Employee): pass 687 Union[int, Employee, Manager] == Union[int, Employee] 688 Union[Manager, int, Employee] == Union[int, Employee] 689 Union[Employee, Manager] == Employee 690 691 - Similar for object:: 692 693 Union[int, object] == object 694 695 - You cannot subclass or instantiate a union. 696 697 - You can use Optional[X] as a shorthand for Union[X, None]. 698 """ 699 700 __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') 701 702 def __new__(cls, parameters=None, origin=None, *args, _root=False): 703 self = super().__new__(cls, parameters, origin, *args, _root=_root) 704 if origin is None: 705 self.__parameters__ = None 706 self.__args__ = None 707 self.__origin__ = None 708 self.__tree_hash__ = hash(frozenset(('Union',))) 709 return self 710 if not isinstance(parameters, tuple): 711 raise TypeError("Expected parameters=<tuple>") 712 if origin is Union: 713 parameters = _remove_dups_flatten(parameters) 714 # It's not a union if there's only one type left. 715 if len(parameters) == 1: 716 return parameters[0] 717 self.__parameters__ = _type_vars(parameters) 718 self.__args__ = parameters 719 self.__origin__ = origin 720 # Pre-calculate the __hash__ on instantiation. 721 # This improves speed for complex substitutions. 722 subs_tree = self._subs_tree() 723 if isinstance(subs_tree, tuple): 724 self.__tree_hash__ = hash(frozenset(subs_tree)) 725 else: 726 self.__tree_hash__ = hash(subs_tree) 727 return self 728 729 def _eval_type(self, globalns, localns): 730 if self.__args__ is None: 731 return self 732 ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) 733 ev_origin = _eval_type(self.__origin__, globalns, localns) 734 if ev_args == self.__args__ and ev_origin == self.__origin__: 735 # Everything is already evaluated. 
736 return self 737 return self.__class__(ev_args, ev_origin, _root=True) 738 739 def _get_type_vars(self, tvars): 740 if self.__origin__ and self.__parameters__: 741 _get_type_vars(self.__parameters__, tvars) 742 743 def __repr__(self): 744 if self.__origin__ is None: 745 return super().__repr__() 746 tree = self._subs_tree() 747 if not isinstance(tree, tuple): 748 return repr(tree) 749 return tree[0]._tree_repr(tree) 750 751 def _tree_repr(self, tree): 752 arg_list = [] 753 for arg in tree[1:]: 754 if not isinstance(arg, tuple): 755 arg_list.append(_type_repr(arg)) 756 else: 757 arg_list.append(arg[0]._tree_repr(arg)) 758 return super().__repr__() + '[%s]' % ', '.join(arg_list) 759 760 @_tp_cache 761 def __getitem__(self, parameters): 762 if parameters == (): 763 raise TypeError("Cannot take a Union of no types.") 764 if not isinstance(parameters, tuple): 765 parameters = (parameters,) 766 if self.__origin__ is None: 767 msg = "Union[arg, ...]: each arg must be a type." 768 else: 769 msg = "Parameters to generic types must be types." 770 parameters = tuple(_type_check(p, msg) for p in parameters) 771 if self is not Union: 772 _check_generic(self, parameters) 773 return self.__class__(parameters, origin=self, _root=True) 774 775 def _subs_tree(self, tvars=None, args=None): 776 if self is Union: 777 return Union # Nothing to substitute 778 tree_args = _subs_tree(self, tvars, args) 779 tree_args = _remove_dups_flatten(tree_args) 780 if len(tree_args) == 1: 781 return tree_args[0] # Union of a single type is that type 782 return (Union,) + tree_args 783 784 def __eq__(self, other): 785 if isinstance(other, _Union): 786 return self.__tree_hash__ == other.__tree_hash__ 787 elif self is not Union: 788 return self._subs_tree() == other 789 else: 790 return self is other 791 792 def __hash__(self): 793 return self.__tree_hash__ 794 795 def __instancecheck__(self, obj): 796 raise TypeError("Unions cannot be used with isinstance().") 797 798 def __subclasscheck__(self, cls): 799 raise TypeError("Unions cannot be used with issubclass().") 800 801 802 Union = _Union(_root=True) 803 804 805 class _Optional(_FinalTypingBase, _root=True): 806 """Optional type. 807 808 Optional[X] is equivalent to Union[X, None]. 809 """ 810 811 __slots__ = () 812 813 @_tp_cache 814 def __getitem__(self, arg): 815 arg = _type_check(arg, "Optional[t] requires a single type.") 816 return Union[arg, type(None)] 817 818 819 Optional = _Optional(_root=True) 820 821 822 def _gorg(a): 823 """Return the farthest origin of a generic class (internal helper).""" 824 assert isinstance(a, GenericMeta) 825 while a.__origin__ is not None: 826 a = a.__origin__ 827 return a 828 829 830 def _geqv(a, b): 831 """Return whether two generic classes are equivalent (internal helper). 832 833 The intention is to consider generic class X and any of its 834 parameterized forms (X[T], X[int], etc.) as equivalent. 835 836 However, X is not equivalent to a subclass of X. 837 838 The relation is reflexive, symmetric and transitive. 839 """ 840 assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) 841 # Reduce each to its origin. 842 return _gorg(a) is _gorg(b) 843 844 845 def _next_in_mro(cls): 846 """Helper for Generic.__new__. 847 848 Returns the class after the last occurrence of Generic or 849 Generic[...] in cls.__mro__. 850 """ 851 next_in_mro = object 852 # Look for the last occurrence of Generic or Generic[...]. 
853 for i, c in enumerate(cls.__mro__[:-1]): 854 if isinstance(c, GenericMeta) and _gorg(c) is Generic: 855 next_in_mro = cls.__mro__[i + 1] 856 return next_in_mro 857 858 859 def _make_subclasshook(cls): 860 """Construct a __subclasshook__ callable that incorporates 861 the associated __extra__ class in subclass checks performed 862 against cls. 863 """ 864 if isinstance(cls.__extra__, abc.ABCMeta): 865 # The logic mirrors that of ABCMeta.__subclasscheck__. 866 # Registered classes need not be checked here because 867 # cls and its extra share the same _abc_registry. 868 def __extrahook__(subclass): 869 res = cls.__extra__.__subclasshook__(subclass) 870 if res is not NotImplemented: 871 return res 872 if cls.__extra__ in subclass.__mro__: 873 return True 874 for scls in cls.__extra__.__subclasses__(): 875 if isinstance(scls, GenericMeta): 876 continue 877 if issubclass(subclass, scls): 878 return True 879 return NotImplemented 880 else: 881 # For non-ABC extras we'll just call issubclass(). 882 def __extrahook__(subclass): 883 if cls.__extra__ and issubclass(subclass, cls.__extra__): 884 return True 885 return NotImplemented 886 return __extrahook__ 887 888 889 def _no_slots_copy(dct): 890 """Internal helper: copy class __dict__ and clean slots class variables. 891 (They will be re-created if necessary by normal class machinery.) 892 """ 893 dict_copy = dict(dct) 894 if '__slots__' in dict_copy: 895 for slot in dict_copy['__slots__']: 896 dict_copy.pop(slot, None) 897 return dict_copy 898 899 900 class GenericMeta(TypingMeta, abc.ABCMeta): 901 """Metaclass for generic types. 902 903 This is a metaclass for typing.Generic and generic ABCs defined in 904 typing module. User defined subclasses of GenericMeta can override 905 __new__ and invoke super().__new__. Note that GenericMeta.__new__ 906 has strict rules on what is allowed in its bases argument: 907 * plain Generic is disallowed in bases; 908 * Generic[...] should appear in bases at most once; 909 * if Generic[...] is present, then it should list all type variables 910 that appear in other bases. 911 In addition, type of all generic bases is erased, e.g., C[int] is 912 stripped to plain C. 913 """ 914 915 def __new__(cls, name, bases, namespace, 916 tvars=None, args=None, origin=None, extra=None, orig_bases=None): 917 """Create a new generic class. GenericMeta.__new__ accepts 918 keyword arguments that are used for internal bookkeeping, therefore 919 an override should pass unused keyword arguments to super(). 920 """ 921 if tvars is not None: 922 # Called from __getitem__() below. 923 assert origin is not None 924 assert all(isinstance(t, TypeVar) for t in tvars), tvars 925 else: 926 # Called from class statement. 927 assert tvars is None, tvars 928 assert args is None, args 929 assert origin is None, origin 930 931 # Get the full set of tvars from the bases. 932 tvars = _type_vars(bases) 933 # Look for Generic[T1, ..., Tn]. 934 # If found, tvars must be a subset of it. 935 # If not found, tvars is it. 936 # Also check for and reject plain Generic, 937 # and reject multiple Generic[...]. 938 gvars = None 939 for base in bases: 940 if base is Generic: 941 raise TypeError("Cannot inherit from plain Generic") 942 if (isinstance(base, GenericMeta) and 943 base.__origin__ is Generic): 944 if gvars is not None: 945 raise TypeError( 946 "Cannot inherit from Generic[...] 
multiple times.")
                    gvars = base.__parameters__
            if gvars is None:
                gvars = tvars
            else:
                tvarset = set(tvars)
                gvarset = set(gvars)
                if not tvarset <= gvarset:
                    raise TypeError(
                        "Some type variables (%s) "
                        "are not listed in Generic[%s]" %
                        (", ".join(str(t) for t in tvars if t not in gvarset),
                         ", ".join(str(g) for g in gvars)))
                tvars = gvars

        initial_bases = bases
        if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
            bases = (extra,) + bases
        bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases)

        # remove bare Generic from bases if there are other generic bases
        if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
            bases = tuple(b for b in bases if b is not Generic)
        namespace.update({'__origin__': origin, '__extra__': extra})
        self = super().__new__(cls, name, bases, namespace, _root=True)

        self.__parameters__ = tvars
        # Be prepared that GenericMeta will be subclassed by TupleMeta
        # and CallableMeta; those two allow ..., (), or [] in __args__.
        self.__args__ = tuple(... if a is _TypingEllipsis else
                              () if a is _TypingEmpty else
                              a for a in args) if args else None
        # Speed hack (https://github.com/python/typing/issues/196).
        self.__next_in_mro__ = _next_in_mro(self)
        # Preserve base classes on subclassing (__bases__ are type erased now).
        if orig_bases is None:
            self.__orig_bases__ = initial_bases

        # This allows unparameterized generic collections to be used
        # with issubclass() and isinstance() in the same way as their
        # collections.abc counterparts (e.g., isinstance([], Iterable)).
        if (
            '__subclasshook__' not in namespace and extra or
            # allow overriding
            getattr(self.__subclasshook__, '__name__', '') == '__extrahook__'
        ):
            self.__subclasshook__ = _make_subclasshook(self)
        if isinstance(extra, abc.ABCMeta):
            self._abc_registry = extra._abc_registry
            self._abc_cache = extra._abc_cache
        elif origin is not None:
            self._abc_registry = origin._abc_registry
            self._abc_cache = origin._abc_cache

        if origin and hasattr(origin, '__qualname__'):  # Fix for Python 3.2.
            self.__qualname__ = origin.__qualname__
        self.__tree_hash__ = (hash(self._subs_tree()) if origin else
                              super(GenericMeta, self).__hash__())
        return self

    # _abc_negative_cache and _abc_negative_cache_version are
    # realised as descriptors, since GenClass[t1, t2, ...] always
    # shares subclass info with GenClass.
    # This is an important memory optimization.
1010 @property 1011 def _abc_negative_cache(self): 1012 if isinstance(self.__extra__, abc.ABCMeta): 1013 return self.__extra__._abc_negative_cache 1014 return _gorg(self)._abc_generic_negative_cache 1015 1016 @_abc_negative_cache.setter 1017 def _abc_negative_cache(self, value): 1018 if self.__origin__ is None: 1019 if isinstance(self.__extra__, abc.ABCMeta): 1020 self.__extra__._abc_negative_cache = value 1021 else: 1022 self._abc_generic_negative_cache = value 1023 1024 @property 1025 def _abc_negative_cache_version(self): 1026 if isinstance(self.__extra__, abc.ABCMeta): 1027 return self.__extra__._abc_negative_cache_version 1028 return _gorg(self)._abc_generic_negative_cache_version 1029 1030 @_abc_negative_cache_version.setter 1031 def _abc_negative_cache_version(self, value): 1032 if self.__origin__ is None: 1033 if isinstance(self.__extra__, abc.ABCMeta): 1034 self.__extra__._abc_negative_cache_version = value 1035 else: 1036 self._abc_generic_negative_cache_version = value 1037 1038 def _get_type_vars(self, tvars): 1039 if self.__origin__ and self.__parameters__: 1040 _get_type_vars(self.__parameters__, tvars) 1041 1042 def _eval_type(self, globalns, localns): 1043 ev_origin = (self.__origin__._eval_type(globalns, localns) 1044 if self.__origin__ else None) 1045 ev_args = tuple(_eval_type(a, globalns, localns) for a 1046 in self.__args__) if self.__args__ else None 1047 if ev_origin == self.__origin__ and ev_args == self.__args__: 1048 return self 1049 return self.__class__(self.__name__, 1050 self.__bases__, 1051 _no_slots_copy(self.__dict__), 1052 tvars=_type_vars(ev_args) if ev_args else None, 1053 args=ev_args, 1054 origin=ev_origin, 1055 extra=self.__extra__, 1056 orig_bases=self.__orig_bases__) 1057 1058 def __repr__(self): 1059 if self.__origin__ is None: 1060 return super().__repr__() 1061 return self._tree_repr(self._subs_tree()) 1062 1063 def _tree_repr(self, tree): 1064 arg_list = [] 1065 for arg in tree[1:]: 1066 if arg == (): 1067 arg_list.append('()') 1068 elif not isinstance(arg, tuple): 1069 arg_list.append(_type_repr(arg)) 1070 else: 1071 arg_list.append(arg[0]._tree_repr(arg)) 1072 return super().__repr__() + '[%s]' % ', '.join(arg_list) 1073 1074 def _subs_tree(self, tvars=None, args=None): 1075 if self.__origin__ is None: 1076 return self 1077 tree_args = _subs_tree(self, tvars, args) 1078 return (_gorg(self),) + tuple(tree_args) 1079 1080 def __eq__(self, other): 1081 if not isinstance(other, GenericMeta): 1082 return NotImplemented 1083 if self.__origin__ is None or other.__origin__ is None: 1084 return self is other 1085 return self.__tree_hash__ == other.__tree_hash__ 1086 1087 def __hash__(self): 1088 return self.__tree_hash__ 1089 1090 @_tp_cache 1091 def __getitem__(self, params): 1092 if not isinstance(params, tuple): 1093 params = (params,) 1094 if not params and not _gorg(self) is Tuple: 1095 raise TypeError( 1096 "Parameter list to %s[...] cannot be empty" % _qualname(self)) 1097 msg = "Parameters to generic types must be types." 1098 params = tuple(_type_check(p, msg) for p in params) 1099 if self is Generic: 1100 # Generic can only be subscripted with unique type variables. 1101 if not all(isinstance(p, TypeVar) for p in params): 1102 raise TypeError( 1103 "Parameters to Generic[...] must all be type variables") 1104 if len(set(params)) != len(params): 1105 raise TypeError( 1106 "Parameters to Generic[...] 
must all be unique")
            tvars = params
            args = params
        elif self in (Tuple, Callable):
            tvars = _type_vars(params)
            args = params
        elif self is _Protocol:
            # _Protocol is internal, don't check anything.
            tvars = params
            args = params
        elif self.__origin__ in (Generic, _Protocol):
            # Can't subscript Generic[...] or _Protocol[...].
            raise TypeError("Cannot subscript already-subscripted %s" %
                            repr(self))
        else:
            # Subscripting a regular Generic subclass.
            _check_generic(self, params)
            tvars = _type_vars(params)
            args = params

        prepend = (self,) if self.__origin__ is None else ()
        return self.__class__(self.__name__,
                              prepend + self.__bases__,
                              _no_slots_copy(self.__dict__),
                              tvars=tvars,
                              args=args,
                              origin=self,
                              extra=self.__extra__,
                              orig_bases=self.__orig_bases__)

    def __subclasscheck__(self, cls):
        if self.__origin__ is not None:
            if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
                raise TypeError("Parameterized generics cannot be used with class "
                                "or instance checks")
            return False
        if self is Generic:
            raise TypeError("Class %r cannot be used with class "
                            "or instance checks" % self)
        return super().__subclasscheck__(cls)

    def __instancecheck__(self, instance):
        # Since we extend ABC.__subclasscheck__ and
        # ABC.__instancecheck__ inlines the cache checking done by the
        # latter, we must extend __instancecheck__ too.  For simplicity
        # we just skip the cache check -- instance checks for generic
        # classes are supposed to be rare anyways.
        return issubclass(instance.__class__, self)

    def __copy__(self):
        return self.__class__(self.__name__, self.__bases__,
                              _no_slots_copy(self.__dict__),
                              self.__parameters__, self.__args__, self.__origin__,
                              self.__extra__, self.__orig_bases__)

    def __setattr__(self, attr, value):
        # We consider all the subscripted generics as proxies for the original class.
        if (
            attr.startswith('__') and attr.endswith('__') or
            attr.startswith('_abc_')
        ):
            super(GenericMeta, self).__setattr__(attr, value)
        else:
            super(GenericMeta, _gorg(self)).__setattr__(attr, value)


# Prevent checks for Generic from crashing when defining Generic.
Generic = None


def _generic_new(base_cls, cls, *args, **kwds):
    # Assure type is erased on instantiation,
    # but attempt to store it in __orig_class__
    if cls.__origin__ is None:
        return base_cls.__new__(cls)
    else:
        origin = _gorg(cls)
        obj = base_cls.__new__(origin)
        try:
            obj.__orig_class__ = cls
        except AttributeError:
            pass
        obj.__init__(*args, **kwds)
        return obj


class Generic(metaclass=GenericMeta):
    """Abstract base class for generic types.

    A generic type is typically declared by inheriting from
    this class parameterized with one or more type variables.
    For example, a generic mapping type might be defined as::

      class Mapping(Generic[KT, VT]):
          def __getitem__(self, key: KT) -> VT:
              ...
          # Etc.
1203 1204 This class can then be used as follows:: 1205 1206 def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: 1207 try: 1208 return mapping[key] 1209 except KeyError: 1210 return default 1211 """ 1212 1213 __slots__ = () 1214 1215 def __new__(cls, *args, **kwds): 1216 if _geqv(cls, Generic): 1217 raise TypeError("Type Generic cannot be instantiated; " 1218 "it can be used only as a base class") 1219 return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) 1220 1221 1222 class _TypingEmpty: 1223 """Internal placeholder for () or []. Used by TupleMeta and CallableMeta 1224 to allow empty list/tuple in specific places, without allowing them 1225 to sneak in where prohibited. 1226 """ 1227 1228 1229 class _TypingEllipsis: 1230 """Internal placeholder for ... (ellipsis).""" 1231 1232 1233 class TupleMeta(GenericMeta): 1234 """Metaclass for Tuple (internal).""" 1235 1236 @_tp_cache 1237 def __getitem__(self, parameters): 1238 if self.__origin__ is not None or not _geqv(self, Tuple): 1239 # Normal generic rules apply if this is not the first subscription 1240 # or a subscription of a subclass. 1241 return super().__getitem__(parameters) 1242 if parameters == (): 1243 return super().__getitem__((_TypingEmpty,)) 1244 if not isinstance(parameters, tuple): 1245 parameters = (parameters,) 1246 if len(parameters) == 2 and parameters[1] is ...: 1247 msg = "Tuple[t, ...]: t must be a type." 1248 p = _type_check(parameters[0], msg) 1249 return super().__getitem__((p, _TypingEllipsis)) 1250 msg = "Tuple[t0, t1, ...]: each t must be a type." 1251 parameters = tuple(_type_check(p, msg) for p in parameters) 1252 return super().__getitem__(parameters) 1253 1254 def __instancecheck__(self, obj): 1255 if self.__args__ is None: 1256 return isinstance(obj, tuple) 1257 raise TypeError("Parameterized Tuple cannot be used " 1258 "with isinstance().") 1259 1260 def __subclasscheck__(self, cls): 1261 if self.__args__ is None: 1262 return issubclass(cls, tuple) 1263 raise TypeError("Parameterized Tuple cannot be used " 1264 "with issubclass().") 1265 1266 1267 class Tuple(tuple, extra=tuple, metaclass=TupleMeta): 1268 """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. 1269 1270 Example: Tuple[T1, T2] is a tuple of two elements corresponding 1271 to type variables T1 and T2. Tuple[int, float, str] is a tuple 1272 of an int, a float and a string. 1273 1274 To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. 1275 """ 1276 1277 __slots__ = () 1278 1279 def __new__(cls, *args, **kwds): 1280 if _geqv(cls, Tuple): 1281 raise TypeError("Type Tuple cannot be instantiated; " 1282 "use tuple() instead") 1283 return _generic_new(tuple, cls, *args, **kwds) 1284 1285 1286 class CallableMeta(GenericMeta): 1287 """Metaclass for Callable (internal).""" 1288 1289 def __repr__(self): 1290 if self.__origin__ is None: 1291 return super().__repr__() 1292 return self._tree_repr(self._subs_tree()) 1293 1294 def _tree_repr(self, tree): 1295 if _gorg(self) is not Callable: 1296 return super()._tree_repr(tree) 1297 # For actual Callable (not its subclass) we override 1298 # super()._tree_repr() for nice formatting. 
1299 arg_list = [] 1300 for arg in tree[1:]: 1301 if not isinstance(arg, tuple): 1302 arg_list.append(_type_repr(arg)) 1303 else: 1304 arg_list.append(arg[0]._tree_repr(arg)) 1305 if arg_list[0] == '...': 1306 return repr(tree[0]) + '[..., %s]' % arg_list[1] 1307 return (repr(tree[0]) + 1308 '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) 1309 1310 def __getitem__(self, parameters): 1311 """A thin wrapper around __getitem_inner__ to provide the latter 1312 with hashable arguments to improve speed. 1313 """ 1314 1315 if self.__origin__ is not None or not _geqv(self, Callable): 1316 return super().__getitem__(parameters) 1317 if not isinstance(parameters, tuple) or len(parameters) != 2: 1318 raise TypeError("Callable must be used as " 1319 "Callable[[arg, ...], result].") 1320 args, result = parameters 1321 if args is Ellipsis: 1322 parameters = (Ellipsis, result) 1323 else: 1324 if not isinstance(args, list): 1325 raise TypeError("Callable[args, result]: args must be a list." 1326 " Got %.100r." % (args,)) 1327 parameters = (tuple(args), result) 1328 return self.__getitem_inner__(parameters) 1329 1330 @_tp_cache 1331 def __getitem_inner__(self, parameters): 1332 args, result = parameters 1333 msg = "Callable[args, result]: result must be a type." 1334 result = _type_check(result, msg) 1335 if args is Ellipsis: 1336 return super().__getitem__((_TypingEllipsis, result)) 1337 msg = "Callable[[arg, ...], result]: each arg must be a type." 1338 args = tuple(_type_check(arg, msg) for arg in args) 1339 parameters = args + (result,) 1340 return super().__getitem__(parameters) 1341 1342 1343 class Callable(extra=collections_abc.Callable, metaclass=CallableMeta): 1344 """Callable type; Callable[[int], str] is a function of (int) -> str. 1345 1346 The subscription syntax must always be used with exactly two 1347 values: the argument list and the return type. The argument list 1348 must be a list of types or ellipsis; the return type must be a single type. 1349 1350 There is no syntax to indicate optional or keyword arguments, 1351 such function types are rarely used as callback types. 1352 """ 1353 1354 __slots__ = () 1355 1356 def __new__(cls, *args, **kwds): 1357 if _geqv(cls, Callable): 1358 raise TypeError("Type Callable cannot be instantiated; " 1359 "use a non-abstract subclass instead") 1360 return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) 1361 1362 1363 class _ClassVar(_FinalTypingBase, _root=True): 1364 """Special type construct to mark class variables. 1365 1366 An annotation wrapped in ClassVar indicates that a given 1367 attribute is intended to be used as a class variable and 1368 should not be set on instances of that class. Usage:: 1369 1370 class Starship: 1371 stats: ClassVar[Dict[str, int]] = {} # class variable 1372 damage: int = 10 # instance variable 1373 1374 ClassVar accepts only types and cannot be further subscribed. 1375 1376 Note that ClassVar is not a class itself, and should not 1377 be used with isinstance() or issubclass(). 
    """

    __slots__ = ('__type__',)

    def __init__(self, tp=None, **kwds):
        self.__type__ = tp

    def __getitem__(self, item):
        cls = type(self)
        if self.__type__ is None:
            return cls(_type_check(item,
                       '{} accepts only a single type.'.format(cls.__name__[1:])),
                       _root=True)
        raise TypeError('{} cannot be further subscripted'
                        .format(cls.__name__[1:]))

    def _eval_type(self, globalns, localns):
        new_tp = _eval_type(self.__type__, globalns, localns)
        if new_tp == self.__type__:
            return self
        return type(self)(new_tp, _root=True)

    def __repr__(self):
        r = super().__repr__()
        if self.__type__ is not None:
            r += '[{}]'.format(_type_repr(self.__type__))
        return r

    def __hash__(self):
        return hash((type(self).__name__, self.__type__))

    def __eq__(self, other):
        if not isinstance(other, _ClassVar):
            return NotImplemented
        if self.__type__ is not None:
            return self.__type__ == other.__type__
        return self is other


ClassVar = _ClassVar(_root=True)


def cast(typ, val):
    """Cast a value to a type.

    This returns the value unchanged.  To the type checker this
    signals that the return value has the designated type, but at
    runtime we intentionally don't check anything (we want this
    to be as fast as possible).
    """
    return val


def _get_defaults(func):
    """Internal helper to extract the default arguments, by name."""
    try:
        code = func.__code__
    except AttributeError:
        # Some built-in functions don't have __code__, __defaults__, etc.
        return {}
    pos_count = code.co_argcount
    arg_names = code.co_varnames
    arg_names = arg_names[:pos_count]
    defaults = func.__defaults__ or ()
    kwdefaults = func.__kwdefaults__
    res = dict(kwdefaults) if kwdefaults else {}
    pos_offset = pos_count - len(defaults)
    for name, value in zip(arg_names[pos_offset:], defaults):
        assert name not in res
        res[name] = value
    return res


_allowed_types = (types.FunctionType, types.BuiltinFunctionType,
                  types.MethodType, types.ModuleType,
                  SlotWrapperType, MethodWrapperType, MethodDescriptorType)


def get_type_hints(obj, globalns=None, localns=None):
    """Return type hints for an object.

    This is often the same as obj.__annotations__, but it handles
    forward references encoded as string literals, and if necessary
    adds Optional[t] if a default value equal to None is set.

    The argument may be a module, class, method, or function.  The annotations
    are returned as a dictionary.  For classes, annotations also include
    inherited members.

    TypeError is raised if the argument is not of a type that can contain
    annotations, and an empty dictionary is returned if no annotations are
    present.

    BEWARE -- the behavior of globalns and localns is counterintuitive
    (unless you are familiar with how eval() and exec() work).  The
    search order is locals first, then globals.

    - If no dict arguments are passed, an attempt is made to use the
      globals from obj, and these are also used as the locals.  If the
      object does not appear to have globals, an exception is raised.

    - If one dict argument is passed, it is used for both globals and
      locals.

    - If two dict arguments are passed, they specify globals and
      locals, respectively.
1484 """ 1485 1486 if getattr(obj, '__no_type_check__', None): 1487 return {} 1488 if globalns is None: 1489 globalns = getattr(obj, '__globals__', {}) 1490 if localns is None: 1491 localns = globalns 1492 elif localns is None: 1493 localns = globalns 1494 # Classes require a special treatment. 1495 if isinstance(obj, type): 1496 hints = {} 1497 for base in reversed(obj.__mro__): 1498 ann = base.__dict__.get('__annotations__', {}) 1499 for name, value in ann.items(): 1500 if value is None: 1501 value = type(None) 1502 if isinstance(value, str): 1503 value = _ForwardRef(value) 1504 value = _eval_type(value, globalns, localns) 1505 hints[name] = value 1506 return hints 1507 hints = getattr(obj, '__annotations__', None) 1508 if hints is None: 1509 # Return empty annotations for something that _could_ have them. 1510 if isinstance(obj, _allowed_types): 1511 return {} 1512 else: 1513 raise TypeError('{!r} is not a module, class, method, ' 1514 'or function.'.format(obj)) 1515 defaults = _get_defaults(obj) 1516 hints = dict(hints) 1517 for name, value in hints.items(): 1518 if value is None: 1519 value = type(None) 1520 if isinstance(value, str): 1521 value = _ForwardRef(value) 1522 value = _eval_type(value, globalns, localns) 1523 if name in defaults and defaults[name] is None: 1524 value = Optional[value] 1525 hints[name] = value 1526 return hints 1527 1528 1529 def no_type_check(arg): 1530 """Decorator to indicate that annotations are not type hints. 1531 1532 The argument must be a class or function; if it is a class, it 1533 applies recursively to all methods and classes defined in that class 1534 (but not to methods defined in its superclasses or subclasses). 1535 1536 This mutates the function(s) or class(es) in place. 1537 """ 1538 if isinstance(arg, type): 1539 arg_attrs = arg.__dict__.copy() 1540 for attr, val in arg.__dict__.items(): 1541 if val in arg.__bases__: 1542 arg_attrs.pop(attr) 1543 for obj in arg_attrs.values(): 1544 if isinstance(obj, types.FunctionType): 1545 obj.__no_type_check__ = True 1546 if isinstance(obj, type): 1547 no_type_check(obj) 1548 try: 1549 arg.__no_type_check__ = True 1550 except TypeError: # built-in classes 1551 pass 1552 return arg 1553 1554 1555 def no_type_check_decorator(decorator): 1556 """Decorator to give another decorator the @no_type_check effect. 1557 1558 This wraps the decorator with something that wraps the decorated 1559 function in @no_type_check. 1560 """ 1561 1562 @functools.wraps(decorator) 1563 def wrapped_decorator(*args, **kwds): 1564 func = decorator(*args, **kwds) 1565 func = no_type_check(func) 1566 return func 1567 1568 return wrapped_decorator 1569 1570 1571 def _overload_dummy(*args, **kwds): 1572 """Helper for @overload to raise when called.""" 1573 raise NotImplementedError( 1574 "You should not call an overloaded function. " 1575 "A series of @overload-decorated functions " 1576 "outside a stub module should always be followed " 1577 "by an implementation that is not @overload-ed.") 1578 1579 1580 def overload(func): 1581 """Decorator for overloaded functions/methods. 1582 1583 In a stub file, place two or more stub definitions for the same 1584 function in a row, each decorated with @overload. For example: 1585 1586 @overload 1587 def utf8(value: None) -> None: ... 1588 @overload 1589 def utf8(value: bytes) -> bytes: ... 1590 @overload 1591 def utf8(value: str) -> bytes: ... 1592 1593 In a non-stub file (i.e. a regular .py file), do the same but 1594 follow it with an implementation. 
The implementation should *not* 1595 be decorated with @overload. For example: 1596 1597 @overload 1598 def utf8(value: None) -> None: ... 1599 @overload 1600 def utf8(value: bytes) -> bytes: ... 1601 @overload 1602 def utf8(value: str) -> bytes: ... 1603 def utf8(value): 1604 # implementation goes here 1605 """ 1606 return _overload_dummy 1607 1608 1609 class _ProtocolMeta(GenericMeta): 1610 """Internal metaclass for _Protocol. 1611 1612 This exists so _Protocol classes can be generic without deriving 1613 from Generic. 1614 """ 1615 1616 def __instancecheck__(self, obj): 1617 if _Protocol not in self.__bases__: 1618 return super().__instancecheck__(obj) 1619 raise TypeError("Protocols cannot be used with isinstance().") 1620 1621 def __subclasscheck__(self, cls): 1622 if not self._is_protocol: 1623 # No structural checks since this isn't a protocol. 1624 return NotImplemented 1625 1626 if self is _Protocol: 1627 # Every class is a subclass of the empty protocol. 1628 return True 1629 1630 # Find all attributes defined in the protocol. 1631 attrs = self._get_protocol_attrs() 1632 1633 for attr in attrs: 1634 if not any(attr in d.__dict__ for d in cls.__mro__): 1635 return False 1636 return True 1637 1638 def _get_protocol_attrs(self): 1639 # Get all Protocol base classes. 1640 protocol_bases = [] 1641 for c in self.__mro__: 1642 if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': 1643 protocol_bases.append(c) 1644 1645 # Get attributes included in protocol. 1646 attrs = set() 1647 for base in protocol_bases: 1648 for attr in base.__dict__.keys(): 1649 # Include attributes not defined in any non-protocol bases. 1650 for c in self.__mro__: 1651 if (c is not base and attr in c.__dict__ and 1652 not getattr(c, '_is_protocol', False)): 1653 break 1654 else: 1655 if (not attr.startswith('_abc_') and 1656 attr != '__abstractmethods__' and 1657 attr != '__annotations__' and 1658 attr != '__weakref__' and 1659 attr != '_is_protocol' and 1660 attr != '__dict__' and 1661 attr != '__args__' and 1662 attr != '__slots__' and 1663 attr != '_get_protocol_attrs' and 1664 attr != '__next_in_mro__' and 1665 attr != '__parameters__' and 1666 attr != '__origin__' and 1667 attr != '__orig_bases__' and 1668 attr != '__extra__' and 1669 attr != '__tree_hash__' and 1670 attr != '__module__'): 1671 attrs.add(attr) 1672 1673 return attrs 1674 1675 1676 class _Protocol(metaclass=_ProtocolMeta): 1677 """Internal base class for protocol classes. 1678 1679 This implements a simple-minded structural issubclass check 1680 (similar but more general than the one-offs in collections.abc 1681 such as Hashable). 1682 """ 1683 1684 __slots__ = () 1685 1686 _is_protocol = True 1687 1688 1689 # Various ABCs mimicking those in collections.abc. 1690 # A few are simply re-exported for completeness. 1691 1692 Hashable = collections_abc.Hashable # Not generic. 
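
# Editorial sketch (not part of the typing API): _demo_runtime_checks is a
# hypothetical helper added only to illustrate the runtime behaviour wired up
# above -- unparameterized generic ABCs defer to their collections.abc
# counterparts for isinstance()/issubclass(), while _Protocol subclasses get a
# structural issubclass() check.  It refers to Iterable and SupportsInt, which
# are defined below, so it should only be called once the module has fully
# loaded; nothing here runs at import time.
def _demo_runtime_checks():
    # Unparameterized generics behave like their collections.abc originals.
    assert isinstance([], Iterable)
    assert issubclass(list, Iterable)
    # Structural check: int defines __int__, so it satisfies SupportsInt.
    assert issubclass(int, SupportsInt)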
1693 1694 1695 if hasattr(collections_abc, 'Awaitable'): 1696 class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): 1697 __slots__ = () 1698 1699 __all__.append('Awaitable') 1700 1701 1702 if hasattr(collections_abc, 'Coroutine'): 1703 class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co], 1704 extra=collections_abc.Coroutine): 1705 __slots__ = () 1706 1707 __all__.append('Coroutine') 1708 1709 1710 if hasattr(collections_abc, 'AsyncIterable'): 1711 1712 class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): 1713 __slots__ = () 1714 1715 class AsyncIterator(AsyncIterable[T_co], 1716 extra=collections_abc.AsyncIterator): 1717 __slots__ = () 1718 1719 __all__.append('AsyncIterable') 1720 __all__.append('AsyncIterator') 1721 1722 1723 class Iterable(Generic[T_co], extra=collections_abc.Iterable): 1724 __slots__ = () 1725 1726 1727 class Iterator(Iterable[T_co], extra=collections_abc.Iterator): 1728 __slots__ = () 1729 1730 1731 class SupportsInt(_Protocol): 1732 __slots__ = () 1733 1734 @abstractmethod 1735 def __int__(self) -> int: 1736 pass 1737 1738 1739 class SupportsFloat(_Protocol): 1740 __slots__ = () 1741 1742 @abstractmethod 1743 def __float__(self) -> float: 1744 pass 1745 1746 1747 class SupportsComplex(_Protocol): 1748 __slots__ = () 1749 1750 @abstractmethod 1751 def __complex__(self) -> complex: 1752 pass 1753 1754 1755 class SupportsBytes(_Protocol): 1756 __slots__ = () 1757 1758 @abstractmethod 1759 def __bytes__(self) -> bytes: 1760 pass 1761 1762 1763 class SupportsAbs(_Protocol[T_co]): 1764 __slots__ = () 1765 1766 @abstractmethod 1767 def __abs__(self) -> T_co: 1768 pass 1769 1770 1771 class SupportsRound(_Protocol[T_co]): 1772 __slots__ = () 1773 1774 @abstractmethod 1775 def __round__(self, ndigits: int = 0) -> T_co: 1776 pass 1777 1778 1779 if hasattr(collections_abc, 'Reversible'): 1780 class Reversible(Iterable[T_co], extra=collections_abc.Reversible): 1781 __slots__ = () 1782 else: 1783 class Reversible(_Protocol[T_co]): 1784 __slots__ = () 1785 1786 @abstractmethod 1787 def __reversed__(self) -> 'Iterator[T_co]': 1788 pass 1789 1790 1791 Sized = collections_abc.Sized # Not generic. 1792 1793 1794 class Container(Generic[T_co], extra=collections_abc.Container): 1795 __slots__ = () 1796 1797 1798 if hasattr(collections_abc, 'Collection'): 1799 class Collection(Sized, Iterable[T_co], Container[T_co], 1800 extra=collections_abc.Collection): 1801 __slots__ = () 1802 1803 __all__.append('Collection') 1804 1805 1806 # Callable was defined earlier. 1807 1808 if hasattr(collections_abc, 'Collection'): 1809 class AbstractSet(Collection[T_co], 1810 extra=collections_abc.Set): 1811 __slots__ = () 1812 else: 1813 class AbstractSet(Sized, Iterable[T_co], Container[T_co], 1814 extra=collections_abc.Set): 1815 __slots__ = () 1816 1817 1818 class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): 1819 __slots__ = () 1820 1821 1822 # NOTE: It is only covariant in the value type. 
# NOTE: It is only covariant in the value type.
if hasattr(collections_abc, 'Collection'):
    class Mapping(Collection[KT], Generic[KT, VT_co],
                  extra=collections_abc.Mapping):
        __slots__ = ()
else:
    class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co],
                  extra=collections_abc.Mapping):
        __slots__ = ()


class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping):
    __slots__ = ()


if hasattr(collections_abc, 'Reversible'):
    if hasattr(collections_abc, 'Collection'):
        class Sequence(Reversible[T_co], Collection[T_co],
                       extra=collections_abc.Sequence):
            __slots__ = ()
    else:
        class Sequence(Sized, Reversible[T_co], Container[T_co],
                       extra=collections_abc.Sequence):
            __slots__ = ()
else:
    class Sequence(Sized, Iterable[T_co], Container[T_co],
                   extra=collections_abc.Sequence):
        __slots__ = ()


class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence):
    __slots__ = ()


class ByteString(Sequence[int], extra=collections_abc.ByteString):
    __slots__ = ()


class List(list, MutableSequence[T], extra=list):

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        if _geqv(cls, List):
            raise TypeError("Type List cannot be instantiated; "
                            "use list() instead")
        return _generic_new(list, cls, *args, **kwds)


class Deque(collections.deque, MutableSequence[T], extra=collections.deque):

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        if _geqv(cls, Deque):
            return collections.deque(*args, **kwds)
        return _generic_new(collections.deque, cls, *args, **kwds)


class Set(set, MutableSet[T], extra=set):

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        if _geqv(cls, Set):
            raise TypeError("Type Set cannot be instantiated; "
                            "use set() instead")
        return _generic_new(set, cls, *args, **kwds)


class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset):
    __slots__ = ()

    def __new__(cls, *args, **kwds):
        if _geqv(cls, FrozenSet):
            raise TypeError("Type FrozenSet cannot be instantiated; "
                            "use frozenset() instead")
        return _generic_new(frozenset, cls, *args, **kwds)


class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView):
    __slots__ = ()


class KeysView(MappingView[KT], AbstractSet[KT],
               extra=collections_abc.KeysView):
    __slots__ = ()


class ItemsView(MappingView[Tuple[KT, VT_co]],
                AbstractSet[Tuple[KT, VT_co]],
                Generic[KT, VT_co],
                extra=collections_abc.ItemsView):
    __slots__ = ()


class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView):
    __slots__ = ()


if hasattr(contextlib, 'AbstractContextManager'):
    class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager):
        __slots__ = ()
    __all__.append('ContextManager')


class Dict(dict, MutableMapping[KT, VT], extra=dict):

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        if _geqv(cls, Dict):
            raise TypeError("Type Dict cannot be instantiated; "
                            "use dict() instead")
        return _generic_new(dict, cls, *args, **kwds)
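
# Illustrative sketch (commented out; 'Registry' is a hypothetical subclass).
# The __new__ guards above mean the bare typing aliases cannot be instantiated
# directly, while subclassing a parameterized alias still produces instances
# of the underlying builtin via _generic_new:
#
#     Dict()                           # TypeError: use dict() instead
#     List[int]()                      # TypeError as well (_geqv ignores args)
#
#     class Registry(Dict[str, int]):
#         pass
#
#     r = Registry(answer=42)          # a real dict underneath
#     isinstance(r, dict)              # True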
class DefaultDict(collections.defaultdict, MutableMapping[KT, VT],
                  extra=collections.defaultdict):

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        if _geqv(cls, DefaultDict):
            return collections.defaultdict(*args, **kwds)
        return _generic_new(collections.defaultdict, cls, *args, **kwds)


class Counter(collections.Counter, Dict[T, int], extra=collections.Counter):

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        if _geqv(cls, Counter):
            return collections.Counter(*args, **kwds)
        return _generic_new(collections.Counter, cls, *args, **kwds)


if hasattr(collections, 'ChainMap'):
    # ChainMap only exists in 3.3+
    __all__.append('ChainMap')

    class ChainMap(collections.ChainMap, MutableMapping[KT, VT],
                   extra=collections.ChainMap):

        __slots__ = ()

        def __new__(cls, *args, **kwds):
            if _geqv(cls, ChainMap):
                return collections.ChainMap(*args, **kwds)
            return _generic_new(collections.ChainMap, cls, *args, **kwds)


# Determine what base class to use for Generator.
if hasattr(collections_abc, 'Generator'):
    # Sufficiently recent versions of 3.5 have a Generator ABC.
    _G_base = collections_abc.Generator
else:
    # Fall back on the exact type.
    _G_base = types.GeneratorType


class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co],
                extra=_G_base):
    __slots__ = ()

    def __new__(cls, *args, **kwds):
        if _geqv(cls, Generator):
            raise TypeError("Type Generator cannot be instantiated; "
                            "create a subclass instead")
        return _generic_new(_G_base, cls, *args, **kwds)


if hasattr(collections_abc, 'AsyncGenerator'):
    class AsyncGenerator(AsyncIterator[T_co], Generic[T_co, T_contra],
                         extra=collections_abc.AsyncGenerator):
        __slots__ = ()

    __all__.append('AsyncGenerator')


# Internal type variable used for Type[].
CT_co = TypeVar('CT_co', covariant=True, bound=type)


# This is not a real generic class.  Don't use outside annotations.
class Type(Generic[CT_co], extra=type):
    """A special construct usable to annotate class objects.

    For example, suppose we have the following classes::

      class User: ...  # Abstract base for User classes
      class BasicUser(User): ...
      class ProUser(User): ...
      class TeamUser(User): ...

    And a function that takes a class argument that's a subclass of
    User and returns an instance of the corresponding class::

      U = TypeVar('U', bound=User)
      def new_user(user_class: Type[U]) -> U:
          user = user_class()
          # (Here we could write the user object to a database)
          return user

      joe = new_user(BasicUser)

    At this point the type checker knows that joe has type BasicUser.
    """

    __slots__ = ()


def _make_nmtuple(name, types):
    msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
    types = [(n, _type_check(t, msg)) for n, t in types]
    nm_tpl = collections.namedtuple(name, [n for n, t in types])
    # Prior to PEP 526, only _field_types attribute was assigned.
    # Now, both __annotations__ and _field_types are used to maintain compatibility.
    nm_tpl.__annotations__ = nm_tpl._field_types = collections.OrderedDict(types)
    try:
        nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')
    except (AttributeError, ValueError):
        pass
    return nm_tpl


_PY36 = sys.version_info[:2] >= (3, 6)

# attributes prohibited to set in NamedTuple class syntax
_prohibited = ('__new__', '__init__', '__slots__', '__getnewargs__',
               '_fields', '_field_defaults', '_field_types',
               '_make', '_replace', '_asdict')

_special = ('__module__', '__name__', '__qualname__', '__annotations__')


class NamedTupleMeta(type):

    def __new__(cls, typename, bases, ns):
        if ns.get('_root', False):
            return super().__new__(cls, typename, bases, ns)
        if not _PY36:
            raise TypeError("Class syntax for NamedTuple is only supported"
                            " in Python 3.6+")
        types = ns.get('__annotations__', {})
        nm_tpl = _make_nmtuple(typename, types.items())
        defaults = []
        defaults_dict = {}
        for field_name in types:
            if field_name in ns:
                default_value = ns[field_name]
                defaults.append(default_value)
                defaults_dict[field_name] = default_value
            elif defaults:
                raise TypeError("Non-default namedtuple field {field_name} cannot "
                                "follow default field(s) {default_names}"
                                .format(field_name=field_name,
                                        default_names=', '.join(defaults_dict.keys())))
        nm_tpl.__new__.__defaults__ = tuple(defaults)
        nm_tpl._field_defaults = defaults_dict
        # update from user namespace without overriding special namedtuple attributes
        for key in ns:
            if key in _prohibited:
                raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
            elif key not in _special and key not in nm_tpl._fields:
                setattr(nm_tpl, key, ns[key])
        return nm_tpl


class NamedTuple(metaclass=NamedTupleMeta):
    """Typed version of namedtuple.

    Usage in Python versions >= 3.6::

        class Employee(NamedTuple):
            name: str
            id: int

    This is equivalent to::

        Employee = collections.namedtuple('Employee', ['name', 'id'])

    The resulting class has extra __annotations__ and _field_types
    attributes, giving an ordered dict mapping field names to types.
    __annotations__ should be preferred, while _field_types
    is kept to maintain pre PEP 526 compatibility. (The field names
    are in the _fields attribute, which is part of the namedtuple
    API.) Alternative equivalent keyword syntax is also accepted::

        Employee = NamedTuple('Employee', name=str, id=int)

    In Python versions <= 3.5 use::

        Employee = NamedTuple('Employee', [('name', str), ('id', int)])
    """
    _root = True

    def __new__(self, typename, fields=None, **kwargs):
        if kwargs and not _PY36:
            raise TypeError("Keyword syntax for NamedTuple is only supported"
                            " in Python 3.6+")
        if fields is None:
            fields = kwargs.items()
        elif kwargs:
            raise TypeError("Either list of fields or keywords"
                            " can be provided to NamedTuple, not both")
        return _make_nmtuple(typename, fields)
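
# Illustrative sketch (commented out; 'Point' and 'Bad' are hypothetical,
# Python 3.6+ class syntax only). Fields assigned a value in the class body
# become defaults via NamedTupleMeta and are recorded in _field_defaults;
# a non-default field may not follow a default one:
#
#     class Point(NamedTuple):
#         x: int
#         y: int = 0
#
#     Point(1)                 # Point(x=1, y=0)
#     Point._field_defaults    # {'y': 0}
#     Point.__annotations__    # OrderedDict([('x', int), ('y', int)])
#
#     class Bad(NamedTuple):   # TypeError: non-default field after default field
#         x: int = 0
#         y: int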
def NewType(name, tp):
    """NewType creates simple unique types with almost zero
    runtime overhead. NewType(name, tp) is considered a subtype of tp
    by static type checkers. At runtime, NewType(name, tp) returns
    a dummy function that simply returns its argument. Usage::

        UserId = NewType('UserId', int)

        def name_by_id(user_id: UserId) -> str:
            ...

        UserId('user')          # Fails type check

        name_by_id(42)          # Fails type check
        name_by_id(UserId(42))  # OK

        num = UserId(5) + 1     # type: int
    """

    def new_type(x):
        return x

    new_type.__name__ = name
    new_type.__supertype__ = tp
    return new_type


# Python-version-specific alias (Python 2: unicode; Python 3: str)
Text = str


# Constant that's True when type checking, but False here.
TYPE_CHECKING = False


class IO(Generic[AnyStr]):
    """Generic base class for TextIO and BinaryIO.

    This is an abstract, generic version of the return of open().

    NOTE: This does not distinguish between the different possible
    classes (text vs. binary, read vs. write vs. read/write,
    append-only, unbuffered).  The TextIO and BinaryIO subclasses
    below capture the distinctions between text vs. binary, which is
    pervasive in the interface; however we currently do not offer a
    way to track the other distinctions in the type system.
    """

    __slots__ = ()

    @abstractproperty
    def mode(self) -> str:
        pass

    @abstractproperty
    def name(self) -> str:
        pass

    @abstractmethod
    def close(self) -> None:
        pass

    @abstractmethod
    def closed(self) -> bool:
        pass

    @abstractmethod
    def fileno(self) -> int:
        pass

    @abstractmethod
    def flush(self) -> None:
        pass

    @abstractmethod
    def isatty(self) -> bool:
        pass

    @abstractmethod
    def read(self, n: int = -1) -> AnyStr:
        pass

    @abstractmethod
    def readable(self) -> bool:
        pass

    @abstractmethod
    def readline(self, limit: int = -1) -> AnyStr:
        pass

    @abstractmethod
    def readlines(self, hint: int = -1) -> List[AnyStr]:
        pass

    @abstractmethod
    def seek(self, offset: int, whence: int = 0) -> int:
        pass

    @abstractmethod
    def seekable(self) -> bool:
        pass

    @abstractmethod
    def tell(self) -> int:
        pass

    @abstractmethod
    def truncate(self, size: int = None) -> int:
        pass

    @abstractmethod
    def writable(self) -> bool:
        pass

    @abstractmethod
    def write(self, s: AnyStr) -> int:
        pass

    @abstractmethod
    def writelines(self, lines: List[AnyStr]) -> None:
        pass

    @abstractmethod
    def __enter__(self) -> 'IO[AnyStr]':
        pass

    @abstractmethod
    def __exit__(self, type, value, traceback) -> None:
        pass


class BinaryIO(IO[bytes]):
    """Typed version of the return of open() in binary mode."""

    __slots__ = ()

    @abstractmethod
    def write(self, s: Union[bytes, bytearray]) -> int:
        pass

    @abstractmethod
    def __enter__(self) -> 'BinaryIO':
        pass
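
# Illustrative sketch (commented out; the helper functions are hypothetical).
# IO, BinaryIO and TextIO are meant purely as annotation targets for file-like
# objects such as those returned by open():
#
#     def count_lines(f: TextIO) -> int:
#         return sum(1 for _ in f)
#
#     def copy_payload(src: BinaryIO, dst: BinaryIO) -> None:
#         dst.write(src.read())
#
#     def drain(f: IO[AnyStr]) -> AnyStr:
#         return f.read()       # works for either str or bytes streams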
class TextIO(IO[str]):
    """Typed version of the return of open() in text mode."""

    __slots__ = ()

    @abstractproperty
    def buffer(self) -> BinaryIO:
        pass

    @abstractproperty
    def encoding(self) -> str:
        pass

    @abstractproperty
    def errors(self) -> Optional[str]:
        pass

    @abstractproperty
    def line_buffering(self) -> bool:
        pass

    @abstractproperty
    def newlines(self) -> Any:
        pass

    @abstractmethod
    def __enter__(self) -> 'TextIO':
        pass


class io:
    """Wrapper namespace for IO generic classes."""

    __all__ = ['IO', 'TextIO', 'BinaryIO']
    IO = IO
    TextIO = TextIO
    BinaryIO = BinaryIO


io.__name__ = __name__ + '.io'
sys.modules[io.__name__] = io


Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')),
                     lambda p: p.pattern)
Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')),
                   lambda m: m.re.pattern)


class re:
    """Wrapper namespace for re type aliases."""

    __all__ = ['Pattern', 'Match']
    Pattern = Pattern
    Match = Match


re.__name__ = __name__ + '.re'
sys.modules[re.__name__] = re
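
# Illustrative sketch (commented out; 'WORD' and 'find_word' are hypothetical).
# Pattern and Match are generic aliases over the concrete types returned by
# stdlib re.compile() and re.match(); they exist only for annotations and are
# also reachable via the typing.re pseudo-submodule registered above:
#
#     import re
#     from typing import Match, Optional, Pattern
#
#     WORD = re.compile(r'\w+')          # acceptable where Pattern[str] is expected
#
#     def find_word(text: str, pat: Pattern[str]) -> Optional[Match[str]]:
#         return pat.search(text)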