Diffstat (limited to 'modules/language/python/module')
-rw-r--r--  modules/language/python/module/enum.py    876
-rw-r--r--  modules/language/python/module/io.scm        9
-rw-r--r--  modules/language/python/module/types.scm   369
3 files changed, 1252 insertions, 2 deletions
diff --git a/modules/language/python/module/enum.py b/modules/language/python/module/enum.py
new file mode 100644
index 0000000..4f388ad
--- /dev/null
+++ b/modules/language/python/module/enum.py
@@ -0,0 +1,876 @@
+module(enum) ## needed in guile atm
+
+import sys
+from types import MappingProxyType, DynamicClassAttribute
+from functools import reduce
+from operator import or_ as _or_
+
+# try _collections first to reduce startup cost
+try:
+ from _collections import OrderedDict
+except ImportError:
+ from collections import OrderedDict
+
+
+__all__ = [
+ 'EnumMeta',
+ 'Enum', 'IntEnum', 'Flag', 'IntFlag',
+ 'auto', 'unique',
+ ]
+
+
+def _is_descriptor(obj):
+ """Returns True if obj is a descriptor, False otherwise."""
+ return (
+ hasattr(obj, '__get__') or
+ hasattr(obj, '__set__') or
+ hasattr(obj, '__delete__'))
+
+
+def _is_dunder(name):
+ """Returns True if a __dunder__ name, False otherwise."""
+ return (name[:2] == name[-2:] == '__' and
+ name[2:3] != '_' and
+ name[-3:-2] != '_' and
+ len(name) > 4)
+
+
+def _is_sunder(name):
+ """Returns True if a _sunder_ name, False otherwise."""
+ return (name[0] == name[-1] == '_' and
+ name[1:2] != '_' and
+ name[-2:-1] != '_' and
+ len(name) > 2)
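+# e.g. _is_dunder('__repr__') and _is_sunder('_order_') are True, while a bare
+# '_' or '__' matches neither because of the length checks above.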
+
+def _make_class_unpicklable(cls):
+ """Make the given class un-picklable."""
+ def _break_on_call_reduce(self, proto):
+ raise TypeError('%r cannot be pickled' % self)
+ cls.__reduce_ex__ = _break_on_call_reduce
+ cls.__module__ = '<unknown>'
+
+_auto_null = object()
+class auto:
+ """
+ Instances are replaced with an appropriate value in Enum class suites.
+ """
+ value = _auto_null
+
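+# Illustrative sketch of how auto() is used (ordinary Enum semantics assumed;
+# the Color class below is not part of this module):
+#
+#     class Color(Enum):
+#         RED = auto()     # becomes 1
+#         GREEN = auto()   # becomes 2
+#         BLUE = auto()    # becomes 3
+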
+
+class _EnumDict(dict):
+ """Track enum member order and ensure member names are not reused.
+
+ EnumMeta will use the names found in self._member_names as the
+ enumeration member names.
+
+ """
+ def __init__(self):
+ super().__init__()
+ self._member_names = []
+ self._last_values = []
+
+ def __setitem__(self, key, value):
+ """Changes anything not dundered or not a descriptor.
+
+ If an enum member name is used twice, an error is raised; duplicate
+ values are not checked for.
+
+ Single underscore (sunder) names are reserved.
+
+ """
+ if _is_sunder(key):
+ if key not in (
+ '_order_', '_create_pseudo_member_',
+ '_generate_next_value_', '_missing_',
+ ):
+ raise ValueError('_names_ are reserved for future Enum use')
+ if key == '_generate_next_value_':
+ setattr(self, '_generate_next_value', value)
+ elif _is_dunder(key):
+ if key == '__order__':
+ key = '_order_'
+ elif key in self._member_names:
+ # descriptor overwriting an enum?
+ raise TypeError('Attempted to reuse key: %r' % key)
+ elif not _is_descriptor(value):
+ if key in self:
+ # enum overwriting a descriptor?
+ raise TypeError('%r already defined as: %r' % (key, self[key]))
+ if isinstance(value, auto):
+ if value.value == _auto_null:
+ value.value = self._generate_next_value(key, 1, len(self._member_names), self._last_values[:])
+ value = value.value
+ self._member_names.append(key)
+ self._last_values.append(value)
+ super().__setitem__(key, value)
+
+
+# Dummy value for Enum as EnumMeta explicitly checks for it, but of course
+# until EnumMeta finishes running the first time the Enum class doesn't exist.
+# This is also why there are checks in EnumMeta like `if Enum is not None`
+Enum = None
+
+
+class EnumMeta(type):
+ """Metaclass for Enum"""
+ @classmethod
+ def __prepare__(metacls, cls, bases):
+ # create the namespace dict
+ enum_dict = _EnumDict()
+ # inherit previous flags and _generate_next_value_ function
+ member_type, first_enum = metacls._get_mixins_(bases)
+ if first_enum is not None:
+ enum_dict['_generate_next_value_'] = getattr(first_enum, '_generate_next_value_', None)
+ return enum_dict
+
+ def __new__(metacls, cls, bases, classdict):
+ # an Enum class is final once enumeration items have been defined; it
+ # cannot be mixed with other types (int, float, etc.) if it has an
+ # inherited __new__ unless a new __new__ is defined (or the resulting
+ # class will fail).
+ member_type, first_enum = metacls._get_mixins_(bases)
+ __new__, save_new, use_args = metacls._find_new_(classdict, member_type,
+ first_enum)
+
+ # save enum items into separate mapping so they don't get baked into
+ # the new class
+ enum_members = {k: classdict[k] for k in classdict._member_names}
+ for name in classdict._member_names:
+ del classdict[name]
+
+ # adjust the sunders
+ _order_ = classdict.pop('_order_', None)
+
+ # check for illegal enum names (any others?)
+ invalid_names = set(enum_members) & {'mro', }
+ if invalid_names:
+ raise ValueError('Invalid enum member name: {0}'.format(
+ ','.join(invalid_names)))
+
+ # create a default docstring if one has not been provided
+ if '__doc__' not in classdict:
+ classdict['__doc__'] = 'An enumeration.'
+
+ # create our new Enum type
+ enum_class = super().__new__(metacls, cls, bases, classdict)
+ enum_class._member_names_ = [] # names in definition order
+ enum_class._member_map_ = OrderedDict() # name->value map
+ enum_class._member_type_ = member_type
+
+ # save attributes from super classes so we know if we can take
+ # the shortcut of storing members in the class dict
+ base_attributes = {a for b in enum_class.mro() for a in b.__dict__}
+
+ # Reverse value->name map for hashable values.
+ enum_class._value2member_map_ = {}
+
+ # If a custom type is mixed into the Enum, and it does not know how
+ # to pickle itself, pickle.dumps will succeed but pickle.loads will
+ # fail. Rather than have the error show up later and possibly far
+ # from the source, sabotage the pickle protocol for this class so
+ # that pickle.dumps also fails.
+ #
+ # However, if the new class implements its own __reduce_ex__, do not
+ # sabotage -- it's on them to make sure it works correctly. We use
+ # __reduce_ex__ instead of any of the others as it is preferred by
+ # pickle over __reduce__, and it handles all pickle protocols.
+ if '__reduce_ex__' not in classdict:
+ if member_type is not object:
+ methods = ('__getnewargs_ex__', '__getnewargs__',
+ '__reduce_ex__', '__reduce__')
+ if not any(m in member_type.__dict__ for m in methods):
+ _make_class_unpicklable(enum_class)
+
+ # instantiate them, checking for duplicates as we go
+ # we instantiate first instead of checking for duplicates first in case
+ # a custom __new__ is doing something funky with the values -- such as
+ # auto-numbering ;)
+ for member_name in classdict._member_names:
+ value = enum_members[member_name]
+ if not isinstance(value, tuple):
+ args = (value, )
+ else:
+ args = value
+ if member_type is tuple: # special case for tuple enums
+ args = (args, ) # wrap it one more time
+ if not use_args:
+ enum_member = __new__(enum_class)
+ if not hasattr(enum_member, '_value_'):
+ enum_member._value_ = value
+ else:
+ enum_member = __new__(enum_class, *args)
+ if not hasattr(enum_member, '_value_'):
+ if member_type is object:
+ enum_member._value_ = value
+ else:
+ enum_member._value_ = member_type(*args)
+ value = enum_member._value_
+ enum_member._name_ = member_name
+ enum_member.__objclass__ = enum_class
+ enum_member.__init__(*args)
+ # If another member with the same value was already defined, the
+ # new member becomes an alias to the existing one.
+ for name, canonical_member in enum_class._member_map_.items():
+ if canonical_member._value_ == enum_member._value_:
+ enum_member = canonical_member
+ break
+ else:
+ # Aliases don't appear in member names (only in __members__).
+ enum_class._member_names_.append(member_name)
+ # performance boost for any member that would not shadow
+ # a DynamicClassAttribute
+ if member_name not in base_attributes:
+ setattr(enum_class, member_name, enum_member)
+ # now add to _member_map_
+ enum_class._member_map_[member_name] = enum_member
+ try:
+ # This may fail if value is not hashable. We can't add the value
+ # to the map, and by-value lookups for this value will be
+ # linear.
+ enum_class._value2member_map_[value] = enum_member
+ except TypeError:
+ pass
+
+ # double check that repr and friends are not the mixin's or various
+ # things break (such as pickle)
+ for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
+ class_method = getattr(enum_class, name)
+ obj_method = getattr(member_type, name, None)
+ enum_method = getattr(first_enum, name, None)
+ if obj_method is not None and obj_method is class_method:
+ setattr(enum_class, name, enum_method)
+
+ # replace any other __new__ with our own (as long as Enum is not None,
+ # anyway) -- again, this is to support pickle
+ if Enum is not None:
+ # if the user defined their own __new__, save it before it gets
+ # clobbered in case they subclass later
+ if save_new:
+ enum_class.__new_member__ = __new__
+ enum_class.__new__ = Enum.__new__
+
+ # py3 support for definition order (helps keep py2/py3 code in sync)
+ if _order_ is not None:
+ if isinstance(_order_, str):
+ _order_ = _order_.replace(',', ' ').split()
+ if _order_ != enum_class._member_names_:
+ raise TypeError('member order does not match _order_')
+
+ return enum_class
+
+ def __bool__(self):
+ """
+ classes/types should always be True.
+ """
+ return True
+
+ def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1):
+ """Either returns an existing member, or creates a new enum class.
+
+ This method is used both when an enum class is given a value to match
+ to an enumeration member (i.e. Color(3)) and for the functional API
+ (i.e. Color = Enum('Color', names='RED GREEN BLUE')).
+
+ When used for the functional API:
+
+ `value` will be the name of the new class.
+
+ `names` should be either a string of white-space/comma delimited names
+ (values will start at `start`), or an iterator/mapping of name, value pairs.
+
+ `module` should be set to the module this class is being created in;
+ if it is not set, an attempt to find that module will be made, but if
+ it fails the class will not be picklable.
+
+ `qualname` should be set to the actual location this class can be found
+ at in its module; by default it is set to the global scope. If this is
+ not correct, unpickling will fail in some circumstances.
+
+ `type`, if set, will be mixed in as the first base class.
+
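+        A rough sketch of both uses (the Color names are purely illustrative):
+
+            Color = Enum('Color', 'RED GREEN BLUE')   # functional API
+            Color(1) is Color.RED                     # existing-member lookup
+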
+ """
+ if names is None: # simple value lookup
+ return cls.__new__(cls, value)
+ # otherwise, functional API: we're creating a new Enum type
+ return cls._create_(value, names, module=module, qualname=qualname, type=type, start=start)
+
+ def __contains__(cls, member):
+ return isinstance(member, cls) and member._name_ in cls._member_map_
+
+ def __delattr__(cls, attr):
+ # nicer error message when someone tries to delete an attribute
+ # (see issue19025).
+ if attr in cls._member_map_:
+ raise AttributeError(
+ "%s: cannot delete Enum member." % cls.__name__)
+ super().__delattr__(attr)
+
+ def __dir__(self):
+ return (['__class__', '__doc__', '__members__', '__module__'] +
+ self._member_names_)
+
+ def __getattr__(cls, name):
+ """Return the enum member matching `name`
+
+ We use __getattr__ instead of descriptors or inserting into the enum
+ class' __dict__ in order to support `name` and `value` being both
+ properties for enum members (which live in the class' __dict__) and
+ enum members themselves.
+
+ """
+ if _is_dunder(name):
+ raise AttributeError(name)
+ try:
+ return cls._member_map_[name]
+ except KeyError:
+ raise AttributeError(name) from None
+
+ def __getitem__(cls, name):
+ return cls._member_map_[name]
+
+ def __iter__(cls):
+ return (cls._member_map_[name] for name in cls._member_names_)
+
+ def __len__(cls):
+ return len(cls._member_names_)
+
+ @property
+ def __members__(cls):
+ """Returns a mapping of member name->value.
+
+ This mapping lists all enum members, including aliases. Note that this
+ is a read-only view of the internal mapping.
+
+ """
+ return MappingProxyType(cls._member_map_)
+
+ def __repr__(cls):
+ return "<enum %r>" % cls.__name__
+
+ def __reversed__(cls):
+ return (cls._member_map_[name] for name in reversed(cls._member_names_))
+
+ def __setattr__(cls, name, value):
+ """Block attempts to reassign Enum members.
+
+ A simple assignment to the class namespace only changes one of the
+ several possible ways to get an Enum member from the Enum class,
+ resulting in an inconsistent Enumeration.
+
+ """
+ member_map = cls.__dict__.get('_member_map_', {})
+ if name in member_map:
+ raise AttributeError('Cannot reassign members.')
+ super().__setattr__(name, value)
+
+ def _create_(cls, class_name, names=None, *, module=None, qualname=None, type=None, start=1):
+ """Convenience method to create a new Enum class.
+
+ `names` can be:
+
+ * A string containing member names, separated either with spaces or
+ commas. Values are incremented by 1 from `start`.
+ * An iterable of member names. Values are incremented by 1 from `start`.
+ * An iterable of (member name, value) pairs.
+ * A mapping of member name -> value pairs.
+
+ """
+ metacls = cls.__class__
+ bases = (cls, ) if type is None else (type, cls)
+ _, first_enum = cls._get_mixins_(bases)
+ classdict = metacls.__prepare__(class_name, bases)
+
+ # special processing needed for names?
+ if isinstance(names, str):
+ names = names.replace(',', ' ').split()
+ if isinstance(names, (tuple, list)) and names and isinstance(names[0], str):
+ original_names, names = names, []
+ last_values = []
+ for count, name in enumerate(original_names):
+ value = first_enum._generate_next_value_(name, start, count, last_values[:])
+ last_values.append(value)
+ names.append((name, value))
+
+ # Here, names is either an iterable of (name, value) or a mapping.
+ for item in names:
+ if isinstance(item, str):
+ member_name, member_value = item, names[item]
+ else:
+ member_name, member_value = item
+ classdict[member_name] = member_value
+ enum_class = metacls.__new__(metacls, class_name, bases, classdict)
+
+ # TODO: replace the frame hack if a blessed way to know the calling
+ # module is ever developed
+ if module is None:
+ try:
+ module = sys._getframe(2).f_globals['__name__']
+ except (AttributeError, ValueError) as exc:
+ pass
+ if module is None:
+ _make_class_unpicklable(enum_class)
+ else:
+ enum_class.__module__ = module
+ if qualname is not None:
+ enum_class.__qualname__ = qualname
+
+ return enum_class
+
+ @staticmethod
+ def _get_mixins_(bases):
+ """Returns the type for creating enum members, and the first inherited
+ enum class.
+
+ bases: the tuple of bases that was given to __new__
+
+ """
+ if not bases:
+ return object, Enum
+
+ # double check that we are not subclassing a class with existing
+ # enumeration members; while we're at it, see if any other data
+ # type has been mixed in so we can use the correct __new__
+ member_type = first_enum = None
+ for base in bases:
+ if (base is not Enum and
+ issubclass(base, Enum) and
+ base._member_names_):
+ raise TypeError("Cannot extend enumerations")
+ # base is now the last base in bases
+ if not issubclass(base, Enum):
+ raise TypeError("new enumerations must be created as "
+ "`ClassName([mixin_type,] enum_type)`")
+
+ # get correct mix-in type (either mix-in type of Enum subclass, or
+ # first base if last base is Enum)
+ if not issubclass(bases[0], Enum):
+ member_type = bases[0] # first data type
+ first_enum = bases[-1] # enum type
+ else:
+ for base in bases[0].__mro__:
+ # most common: (IntEnum, int, Enum, object)
+ # possible: (<Enum 'AutoIntEnum'>, <Enum 'IntEnum'>,
+ # <class 'int'>, <Enum 'Enum'>,
+ # <class 'object'>)
+ if issubclass(base, Enum):
+ if first_enum is None:
+ first_enum = base
+ else:
+ if member_type is None:
+ member_type = base
+
+ return member_type, first_enum
+
+ @staticmethod
+ def _find_new_(classdict, member_type, first_enum):
+ """Returns the __new__ to be used for creating the enum members.
+
+ classdict: the class dictionary given to __new__
+ member_type: the data type whose __new__ will be used by default
+ first_enum: enumeration to check for an overriding __new__
+
+ """
+        # now find the correct __new__, checking to see if one was defined
+ # by the user; also check earlier enum classes in case a __new__ was
+ # saved as __new_member__
+ __new__ = classdict.get('__new__', None)
+
+ # should __new__ be saved as __new_member__ later?
+ save_new = __new__ is not None
+
+ if __new__ is None:
+ # check all possibles for __new_member__ before falling back to
+ # __new__
+ for method in ('__new_member__', '__new__'):
+ for possible in (member_type, first_enum):
+ target = getattr(possible, method, None)
+ if target not in {
+ None,
+ None.__new__,
+ object.__new__,
+ Enum.__new__,
+ }:
+ __new__ = target
+ break
+ if __new__ is not None:
+ break
+ else:
+ __new__ = object.__new__
+
+ # if a non-object.__new__ is used then whatever value/tuple was
+ # assigned to the enum member name will be passed to __new__ and to the
+ # new enum member's __init__
+ if __new__ is object.__new__:
+ use_args = False
+ else:
+ use_args = True
+
+ return __new__, save_new, use_args
+
+class Enum(metaclass=EnumMeta):
+ """Generic enumeration.
+
+ Derive from this class to define new enumerations.
+
+ """
+ def __new__(cls, value):
+ # all enum instances are actually created during class construction
+ # without calling this method; this method is called by the metaclass'
+ # __call__ (i.e. Color(3) ), and by pickle
+ if type(value) is cls:
+ # For lookups like Color(Color.RED)
+ return value
+ # by-value search for a matching enum member
+ # see if it's in the reverse mapping (for hashable values)
+ try:
+ if value in cls._value2member_map_:
+ return cls._value2member_map_[value]
+ except TypeError:
+ # not there, now do long search -- O(n) behavior
+ for member in cls._member_map_.values():
+ if member._value_ == value:
+ return member
+ # still not found -- try _missing_ hook
+ return cls._missing_(value)
+
+ def _generate_next_value_(name, start, count, last_values):
+ for last_value in reversed(last_values):
+ try:
+ return last_value + 1
+ except TypeError:
+ pass
+ else:
+ return start
+
+ @classmethod
+ def _missing_(cls, value):
+ raise ValueError("%r is not a valid %s" % (value, cls.__name__))
+
+ def __repr__(self):
+ return "<%s.%s: %r>" % (
+ self.__class__.__name__, self._name_, self._value_)
+
+ def __str__(self):
+ return "%s.%s" % (self.__class__.__name__, self._name_)
+
+ def __dir__(self):
+ added_behavior = [
+ m
+ for cls in self.__class__.mro()
+ for m in cls.__dict__
+ if m[0] != '_' and m not in self._member_map_
+ ]
+ return (['__class__', '__doc__', '__module__'] + added_behavior)
+
+ def __format__(self, format_spec):
+ # mixed-in Enums should use the mixed-in type's __format__, otherwise
+ # we can get strange results with the Enum name showing up instead of
+ # the value
+
+ # pure Enum branch
+ if self._member_type_ is object:
+ cls = str
+ val = str(self)
+ # mix-in branch
+ else:
+ cls = self._member_type_
+ val = self._value_
+ return cls.__format__(val, format_spec)
+
+ def __hash__(self):
+ return hash(self._name_)
+
+ def __reduce_ex__(self, proto):
+ return self.__class__, (self._value_, )
+
+ # DynamicClassAttribute is used to provide access to the `name` and
+ # `value` properties of enum members while keeping some measure of
+ # protection from modification, while still allowing for an enumeration
+ # to have members named `name` and `value`. This works because enumeration
+ # members are not set directly on the enum class -- __getattr__ is
+ # used to look them up.
+
+ @DynamicClassAttribute
+ def name(self):
+ """The name of the Enum member."""
+ return self._name_
+
+ @DynamicClassAttribute
+ def value(self):
+ """The value of the Enum member."""
+ return self._value_
+
+ @classmethod
+ def _convert(cls, name, module, filter, source=None):
+ """
+ Create a new Enum subclass that replaces a collection of global constants
+ """
+ # convert all constants from source (or module) that pass filter() to
+ # a new Enum called name, and export the enum and its members back to
+ # module;
+ # also, replace the __reduce_ex__ method so unpickling works in
+ # previous Python versions
+ module_globals = vars(sys.modules[module])
+ if source:
+ source = vars(source)
+ else:
+ source = module_globals
+ # We use an OrderedDict of sorted source keys so that the
+ # _value2member_map is populated in the same order every time
+ # for a consistent reverse mapping of number to name when there
+ # are multiple names for the same number rather than varying
+ # between runs due to hash randomization of the module dictionary.
+ members = [
+ (name, source[name])
+ for name in source.keys()
+ if filter(name)]
+ try:
+ # sort by value
+ members.sort(key=lambda t: (t[1], t[0]))
+ except TypeError:
+ # unless some values aren't comparable, in which case sort by name
+ members.sort(key=lambda t: t[0])
+ cls = cls(name, members, module=module)
+ cls.__reduce_ex__ = _reduce_ex_by_name
+ module_globals.update(cls.__members__)
+ module_globals[name] = cls
+ return cls
+
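+# _convert is how the stdlib turns a module's integer constants into an
+# IntEnum.  A hedged sketch of the intended call pattern (the module name and
+# filter below are illustrative, mirroring what socket.py does for AF_*):
+#
+#     IntEnum._convert('AddressFamily', 'socket',
+#                      lambda name: name.isupper() and name.startswith('AF_'))
+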
+
+class IntEnum(int, Enum):
+ """Enum where members are also (and must be) ints"""
+
+
+def _reduce_ex_by_name(self, proto):
+ return self.name
+
+class Flag(Enum):
+ """Support for flags"""
+
+ def _generate_next_value_(name, start, count, last_values):
+ """
+ Generate the next value when not given.
+
+ name: the name of the member
+        start: the initial start value or None
+ count: the number of existing members
+        last_values: the list of values assigned so far
+ """
+ if not count:
+ return start if start is not None else 1
+ for last_value in reversed(last_values):
+ try:
+ high_bit = _high_bit(last_value)
+ break
+ except Exception:
+ raise TypeError('Invalid Flag value: %r' % last_value) from None
+ return 2 ** (high_bit+1)
+
+ @classmethod
+ def _missing_(cls, value):
+ original_value = value
+ if value < 0:
+ value = ~value
+ possible_member = cls._create_pseudo_member_(value)
+ if original_value < 0:
+ possible_member = ~possible_member
+ return possible_member
+
+ @classmethod
+ def _create_pseudo_member_(cls, value):
+ """
+ Create a composite member iff value contains only members.
+ """
+ pseudo_member = cls._value2member_map_.get(value, None)
+ if pseudo_member is None:
+ # verify all bits are accounted for
+ _, extra_flags = _decompose(cls, value)
+ if extra_flags:
+ raise ValueError("%r is not a valid %s" % (value, cls.__name__))
+ # construct a singleton enum pseudo-member
+ pseudo_member = object.__new__(cls)
+ pseudo_member._name_ = None
+ pseudo_member._value_ = value
+ # use setdefault in case another thread already created a composite
+ # with this value
+ pseudo_member = cls._value2member_map_.setdefault(value, pseudo_member)
+ return pseudo_member
+
+ def __contains__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return other._value_ & self._value_ == other._value_
+
+ def __repr__(self):
+ cls = self.__class__
+ if self._name_ is not None:
+ return '<%s.%s: %r>' % (cls.__name__, self._name_, self._value_)
+ members, uncovered = _decompose(cls, self._value_)
+ return '<%s.%s: %r>' % (
+ cls.__name__,
+ '|'.join([str(m._name_ or m._value_) for m in members]),
+ self._value_,
+ )
+
+ def __str__(self):
+ cls = self.__class__
+ if self._name_ is not None:
+ return '%s.%s' % (cls.__name__, self._name_)
+ members, uncovered = _decompose(cls, self._value_)
+ if len(members) == 1 and members[0]._name_ is None:
+ return '%s.%r' % (cls.__name__, members[0]._value_)
+ else:
+ return '%s.%s' % (
+ cls.__name__,
+ '|'.join([str(m._name_ or m._value_) for m in members]),
+ )
+
+ def __bool__(self):
+ return bool(self._value_)
+
+ def __or__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self.__class__(self._value_ | other._value_)
+
+ def __and__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self.__class__(self._value_ & other._value_)
+
+ def __xor__(self, other):
+ if not isinstance(other, self.__class__):
+ return NotImplemented
+ return self.__class__(self._value_ ^ other._value_)
+
+ def __invert__(self):
+ members, uncovered = _decompose(self.__class__, self._value_)
+ inverted_members = [
+ m for m in self.__class__
+ if m not in members and not m._value_ & self._value_
+ ]
+ inverted = reduce(_or_, inverted_members, self.__class__(0))
+ return self.__class__(inverted)
+
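+# A minimal sketch of the Flag semantics implemented above (the Perm class is
+# purely illustrative):
+#
+#     class Perm(Flag):
+#         R = auto()           # 1
+#         W = auto()           # 2
+#         X = auto()           # 4
+#     RW = Perm.R | Perm.W     # composite pseudo-member with value 3
+#     Perm.W in RW             # True, via __contains__
+#     bool(Perm.R & Perm.X)    # False -- the empty flag is falsy
+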
+
+class IntFlag(int, Flag):
+ """Support for integer-based Flags"""
+
+ @classmethod
+ def _missing_(cls, value):
+ if not isinstance(value, int):
+ raise ValueError("%r is not a valid %s" % (value, cls.__name__))
+ new_member = cls._create_pseudo_member_(value)
+ return new_member
+
+ @classmethod
+ def _create_pseudo_member_(cls, value):
+ pseudo_member = cls._value2member_map_.get(value, None)
+ if pseudo_member is None:
+ need_to_create = [value]
+ # get unaccounted for bits
+ _, extra_flags = _decompose(cls, value)
+ while extra_flags:
+ bit = _high_bit(extra_flags)
+ flag_value = 2 ** bit
+ if (flag_value not in cls._value2member_map_ and
+ flag_value not in need_to_create
+ ):
+ need_to_create.append(flag_value)
+ if extra_flags == -flag_value:
+ extra_flags = 0
+ else:
+ extra_flags ^= flag_value
+ for value in reversed(need_to_create):
+ # construct singleton pseudo-members
+ pseudo_member = int.__new__(cls, value)
+ pseudo_member._name_ = None
+ pseudo_member._value_ = value
+ # use setdefault in case another thread already created a composite
+ # with this value
+ pseudo_member = cls._value2member_map_.setdefault(value, pseudo_member)
+ return pseudo_member
+
+ def __or__(self, other):
+ if not isinstance(other, (self.__class__, int)):
+ return NotImplemented
+ result = self.__class__(self._value_ | self.__class__(other)._value_)
+ return result
+
+ def __and__(self, other):
+ if not isinstance(other, (self.__class__, int)):
+ return NotImplemented
+ return self.__class__(self._value_ & self.__class__(other)._value_)
+
+ def __xor__(self, other):
+ if not isinstance(other, (self.__class__, int)):
+ return NotImplemented
+ return self.__class__(self._value_ ^ self.__class__(other)._value_)
+
+ __ror__ = __or__
+ __rand__ = __and__
+ __rxor__ = __xor__
+
+ def __invert__(self):
+ result = self.__class__(~self._value_)
+ return result
+
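+# IntFlag members interoperate with plain ints; a brief illustrative sketch
+# (the Mode class is hypothetical):
+#
+#     class Mode(IntFlag):
+#         R = 4
+#         W = 2
+#     (Mode.R | 1) == 5      # composing with an int yields a Mode pseudo-member
+#     Mode.R + 0 == 4        # other arithmetic falls back to plain int
+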
+
+def _high_bit(value):
+ """returns index of highest bit, or -1 if value is zero or negative"""
+ return value.bit_length() - 1
+
+def unique(enumeration):
+ """Class decorator for enumerations ensuring unique member values."""
+ duplicates = []
+ for name, member in enumeration.__members__.items():
+ if name != member.name:
+ duplicates.append((name, member.name))
+ if duplicates:
+ alias_details = ', '.join(
+ ["%s -> %s" % (alias, name) for (alias, name) in duplicates])
+ raise ValueError('duplicate values found in %r: %s' %
+ (enumeration, alias_details))
+ return enumeration
+
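+# Sketch of how @unique is expected to behave (the Mistake class is only an
+# illustration):
+#
+#     @unique
+#     class Mistake(Enum):
+#         ONE = 1
+#         TWO = 2
+#         THREE = 2   # alias of TWO, so unique() raises ValueError
+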
+def _decompose(flag, value):
+ """Extract all members from the value."""
+ # _decompose is only called if the value is not named
+ not_covered = value
+ negative = value < 0
+ # issue29167: wrap accesses to _value2member_map_ in a list to avoid race
+    # conditions between iterating over it and having more pseudo-
+ # members added to it
+ if negative:
+ # only check for named flags
+ flags_to_check = [
+ (m, v)
+ for v, m in list(flag._value2member_map_.items())
+ if m.name is not None
+ ]
+ else:
+ # check for named flags and powers-of-two flags
+ flags_to_check = [
+ (m, v)
+ for v, m in list(flag._value2member_map_.items())
+ if m.name is not None or _power_of_two(v)
+ ]
+ members = []
+ for member, member_value in flags_to_check:
+ if member_value and member_value & value == member_value:
+ members.append(member)
+ not_covered &= ~member_value
+ if not members and value in flag._value2member_map_:
+ members.append(flag._value2member_map_[value])
+ members.sort(key=lambda m: m._value_, reverse=True)
+ if len(members) > 1 and members[0].value == value:
+ # we have the breakdown, don't need the value member itself
+ members.pop(0)
+ return members, not_covered
+
+def _power_of_two(value):
+ if value < 1:
+ return False
+ return value == 2 ** _high_bit(value)
diff --git a/modules/language/python/module/io.scm b/modules/language/python/module/io.scm
index dceced8..87a2ec8 100644
--- a/modules/language/python/module/io.scm
+++ b/modules/language/python/module/io.scm
@@ -411,8 +411,13 @@
(set self 'raw raw)))
(define peek
- (lambda (self)
- (raise UnsupportedOperation peek))))
+ (lambda* (self #:optional (size 1))
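+      ;; peek one character from the wrapped port without consuming it;
+      ;; returns a one-character string, or "" at end of file (size is ignored)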
+ (check (self port)
+ (list->string
+ (let ((r (peek-char port)))
+ (if (char? r)
+ (list r)
+ (list))))))))
(define-python-class BufferedWriter (BufferedIOBase)
(define __init__
diff --git a/modules/language/python/module/types.scm b/modules/language/python/module/types.scm
new file mode 100644
index 0000000..840b901
--- /dev/null
+++ b/modules/language/python/module/types.scm
@@ -0,0 +1,369 @@
+(define-module (language python module types)
+ #:use-module (oop goops)
+ #:use-module (oop pf-objects)
+ #:use-module (language python exceptions)
+ #:use-module (language python def)
+ #:use-module (language python bool)
+ #:use-module ((language python module python)
+ #:select (getattr type))
+  #:export (MappingProxyType DynamicClassAttribute FunctionType LambdaType))
+
+"""
+Define names for built-in types that aren't directly accessible as a builtin.
+"""
+(define MappingProxyType <hashtable>)
+(define FunctionType <procedure>)
+(define LambdaType <procedure>)
+
+(define-python-class DynamicClassAttribute ()
+ "Route attribute access on a class to __getattr__.
+
+ This is a descriptor, used to define attributes that act differently when
+ accessed through an instance and through a class. Instance access remains
+ normal, but access to an attribute through a class will be routed to the
+ class's __getattr__ method; this is done by raising AttributeError.
+
+ This allows one to have properties active on an instance, and have virtual
+ attributes on the class with the same name (see Enum for an example).
+ "
+
+ (define __init__
+ (lam (self (= fget None) (= fset None) (= fdel None) (= doc None))
+ (set self 'fget fget)
+ (set self 'fset fset)
+ (set self 'fdel fdel)
+
+ ;; next two lines make DynamicClassAttribute act the same as property
+ (set self '__doc__ (or (bool doc) (ref fget '__doc__)))
+ (set self 'overwrite_doc (eq? doc None))
+
+ ;; support for abstract methods
+ (set self '__isabstractmethod__
+              (bool (getattr fget '__isabstractmethod__ #f)))))
+
+ (define __get__
+ (lam (self instance (= ownerclass None))
+ (cond
+ ((eq? instance None)
+ (if (bool (ref self '__isabstractmethod__))
+ self
+ (raise (AttributeError))))
+ ((eq? (ref self 'fget) None)
+ (raise (AttributeError "unreadable attribute")))
+ (else
+ ((ref self 'fget) instance)))))
+
+ (define __set__
+ (lambda (self instance value)
+ (if (eq? (ref self 'fset) None)
+ (raise (AttributeError "can't set attribute"))
+ ((ref self 'fset) instance value))))
+
+ (define __delete__
+ (lambda (self instance)
+ (if (eq? (ref self 'fdel) None)
+ (raise (AttributeError "can't delete attribute"))
+ ((ref self 'fdel) instance))))
+
+ (define getter
+ (lambda (self fget)
+ (let* ((fdoc (if (ref self 'overwrite_doc)
+ (ref fget '__doc__)
+ None))
+ (result ((type self)
+ fget
+ (ref self 'fset)
+ (ref self 'fdel)
+ (or (bool fdoc) (ref self '__doc__)))))
+
+ (set result 'overwrite_doc (ref self 'overwrite_doc))
+ result)))
+
+ (define setter
+ (lambda (self fset)
+ (let ((result ((type self)
+ (ref self 'fget)
+ fset
+ (ref self 'fdel)
+ (ref self '__doc__))))
+
+ (set result 'overwrite_doc (ref self 'overwrite_doc))
+ result)))
+
+ (define deleter
+ (lambda (self fdel)
+ (let ((result ((type self)
+ (ref self 'fget)
+ (ref self 'fset)
+ fdel
+ (ref self '__doc__))))
+
+ (set result 'overwrite_doc (ref self 'overwrite_doc))
+ result))))
+
+#|
+import sys
+
+# Iterators in Python aren't a matter of type but of protocol. A large
+# and changing number of builtin types implement *some* flavor of
+# iterator. Don't check the type! Use hasattr to check for both
+# "__iter__" and "__next__" attributes instead.
+
+def _f(): pass
+FunctionType = type(_f)
+LambdaType = type(lambda: None) # Same as FunctionType
+MappingProxyType = type(type.__dict__)
+CodeType = type(_f.__code__)
+
+SimpleNamespace = type(sys.implementation)
+
+def _g():
+ yield 1
+GeneratorType = type(_g())
+
+async def _c(): pass
+_c = _c()
+CoroutineType = type(_c)
+_c.close() # Prevent ResourceWarning
+
+async def _ag():
+ yield
+_ag = _ag()
+AsyncGeneratorType = type(_ag)
+
+class _C:
+ def _m(self): pass
+MethodType = type(_C()._m)
+
+BuiltinFunctionType = type(len)
+BuiltinMethodType = type([].append) # Same as BuiltinFunctionType
+
+ModuleType = type(sys)
+
+try:
+ raise TypeError
+except TypeError:
+ tb = sys.exc_info()[2]
+ TracebackType = type(tb)
+ FrameType = type(tb.tb_frame)
+ tb = None; del tb
+
+# For Jython, the following two types are identical
+GetSetDescriptorType = type(FunctionType.__code__)
+MemberDescriptorType = type(FunctionType.__globals__)
+
+del sys, _f, _g, _C, _c, # Not for export
+
+
+# Provide a PEP 3115 compliant mechanism for class creation
+def new_class(name, bases=(), kwds=None, exec_body=None):
+ """Create a class object dynamically using the appropriate metaclass."""
+ meta, ns, kwds = prepare_class(name, bases, kwds)
+ if exec_body is not None:
+ exec_body(ns)
+ return meta(name, bases, ns, **kwds)
+
+def prepare_class(name, bases=(), kwds=None):
+ """Call the __prepare__ method of the appropriate metaclass.
+
+ Returns (metaclass, namespace, kwds) as a 3-tuple
+
+ *metaclass* is the appropriate metaclass
+ *namespace* is the prepared class namespace
+ *kwds* is an updated copy of the passed in kwds argument with any
+ 'metaclass' entry removed. If no kwds argument is passed in, this will
+ be an empty dict.
+ """
+ if kwds is None:
+ kwds = {}
+ else:
+ kwds = dict(kwds) # Don't alter the provided mapping
+ if 'metaclass' in kwds:
+ meta = kwds.pop('metaclass')
+ else:
+ if bases:
+ meta = type(bases[0])
+ else:
+ meta = type
+ if isinstance(meta, type):
+ # when meta is a type, we first determine the most-derived metaclass
+ # instead of invoking the initial candidate directly
+ meta = _calculate_meta(meta, bases)
+ if hasattr(meta, '__prepare__'):
+ ns = meta.__prepare__(name, bases, **kwds)
+ else:
+ ns = {}
+ return meta, ns, kwds
+
+def _calculate_meta(meta, bases):
+ """Calculate the most derived metaclass."""
+ winner = meta
+ for base in bases:
+ base_meta = type(base)
+ if issubclass(winner, base_meta):
+ continue
+ if issubclass(base_meta, winner):
+ winner = base_meta
+ continue
+ # else:
+ raise TypeError("metaclass conflict: "
+ "the metaclass of a derived class "
+ "must be a (non-strict) subclass "
+ "of the metaclasses of all its bases")
+ return winner
+
+class DynamicClassAttribute:
+ """Route attribute access on a class to __getattr__.
+
+ This is a descriptor, used to define attributes that act differently when
+ accessed through an instance and through a class. Instance access remains
+ normal, but access to an attribute through a class will be routed to the
+ class's __getattr__ method; this is done by raising AttributeError.
+
+ This allows one to have properties active on an instance, and have virtual
+ attributes on the class with the same name (see Enum for an example).
+
+ """
+ def __init__(self, fget=None, fset=None, fdel=None, doc=None):
+ self.fget = fget
+ self.fset = fset
+ self.fdel = fdel
+ # next two lines make DynamicClassAttribute act the same as property
+ self.__doc__ = doc or fget.__doc__
+ self.overwrite_doc = doc is None
+ # support for abstract methods
+ self.__isabstractmethod__ = bool(getattr(fget, '__isabstractmethod__', False))
+
+ def __get__(self, instance, ownerclass=None):
+ if instance is None:
+ if self.__isabstractmethod__:
+ return self
+ raise AttributeError()
+ elif self.fget is None:
+ raise AttributeError("unreadable attribute")
+ return self.fget(instance)
+
+ def __set__(self, instance, value):
+ if self.fset is None:
+ raise AttributeError("can't set attribute")
+ self.fset(instance, value)
+
+ def __delete__(self, instance):
+ if self.fdel is None:
+ raise AttributeError("can't delete attribute")
+ self.fdel(instance)
+
+ def getter(self, fget):
+ fdoc = fget.__doc__ if self.overwrite_doc else None
+ result = type(self)(fget, self.fset, self.fdel, fdoc or self.__doc__)
+ result.overwrite_doc = self.overwrite_doc
+ return result
+
+ def setter(self, fset):
+ result = type(self)(self.fget, fset, self.fdel, self.__doc__)
+ result.overwrite_doc = self.overwrite_doc
+ return result
+
+ def deleter(self, fdel):
+ result = type(self)(self.fget, self.fset, fdel, self.__doc__)
+ result.overwrite_doc = self.overwrite_doc
+ return result
+
+
+import functools as _functools
+import collections.abc as _collections_abc
+
+class _GeneratorWrapper:
+ # TODO: Implement this in C.
+ def __init__(self, gen):
+ self.__wrapped = gen
+ self.__isgen = gen.__class__ is GeneratorType
+ self.__name__ = getattr(gen, '__name__', None)
+ self.__qualname__ = getattr(gen, '__qualname__', None)
+ def send(self, val):
+ return self.__wrapped.send(val)
+ def throw(self, tp, *rest):
+ return self.__wrapped.throw(tp, *rest)
+ def close(self):
+ return self.__wrapped.close()
+ @property
+ def gi_code(self):
+ return self.__wrapped.gi_code
+ @property
+ def gi_frame(self):
+ return self.__wrapped.gi_frame
+ @property
+ def gi_running(self):
+ return self.__wrapped.gi_running
+ @property
+ def gi_yieldfrom(self):
+ return self.__wrapped.gi_yieldfrom
+ cr_code = gi_code
+ cr_frame = gi_frame
+ cr_running = gi_running
+ cr_await = gi_yieldfrom
+ def __next__(self):
+ return next(self.__wrapped)
+ def __iter__(self):
+ if self.__isgen:
+ return self.__wrapped
+ return self
+ __await__ = __iter__
+
+def coroutine(func):
+ """Convert regular generator function to a coroutine."""
+
+ if not callable(func):
+ raise TypeError('types.coroutine() expects a callable')
+
+ if (func.__class__ is FunctionType and
+ getattr(func, '__code__', None).__class__ is CodeType):
+
+ co_flags = func.__code__.co_flags
+
+ # Check if 'func' is a coroutine function.
+ # (0x180 == CO_COROUTINE | CO_ITERABLE_COROUTINE)
+ if co_flags & 0x180:
+ return func
+
+ # Check if 'func' is a generator function.
+ # (0x20 == CO_GENERATOR)
+ if co_flags & 0x20:
+ # TODO: Implement this in C.
+ co = func.__code__
+ func.__code__ = CodeType(
+ co.co_argcount, co.co_kwonlyargcount, co.co_nlocals,
+ co.co_stacksize,
+ co.co_flags | 0x100, # 0x100 == CO_ITERABLE_COROUTINE
+ co.co_code,
+ co.co_consts, co.co_names, co.co_varnames, co.co_filename,
+ co.co_name, co.co_firstlineno, co.co_lnotab, co.co_freevars,
+ co.co_cellvars)
+ return func
+
+ # The following code is primarily to support functions that
+ # return generator-like objects (for instance generators
+ # compiled with Cython).
+
+ @_functools.wraps(func)
+ def wrapped(*args, **kwargs):
+ coro = func(*args, **kwargs)
+ if (coro.__class__ is CoroutineType or
+ coro.__class__ is GeneratorType and coro.gi_code.co_flags & 0x100):
+ # 'coro' is a native coroutine object or an iterable coroutine
+ return coro
+ if (isinstance(coro, _collections_abc.Generator) and
+ not isinstance(coro, _collections_abc.Coroutine)):
+ # 'coro' is either a pure Python generator iterator, or it
+ # implements collections.abc.Generator (and does not implement
+ # collections.abc.Coroutine).
+ return _GeneratorWrapper(coro)
+ # 'coro' is either an instance of collections.abc.Coroutine or
+ # some other object -- pass it through.
+ return coro
+
+ return wrapped
+
+
+__all__ = [n for n in globals() if n[:1] != '_']
+|#