" % (ensure_new_type(self.faultCode),
+ ensure_new_type(self.faultString))
+
+# --------------------------------------------------------------------
+# Special values
+
+##
+# Backwards compatibility
+
+boolean = Boolean = bool
+
+##
+# Wrapper for XML-RPC DateTime values. This converts a time value to
+# the format used by XML-RPC.
+#
+# The value can be given as a datetime object, as a string in the
+# format "yyyymmddThh:mm:ss", as a 9-item time tuple (as returned by
+# time.localtime()), or an integer value (as returned by time.time()).
+# The wrapper uses time.localtime() to convert an integer to a time
+# tuple.
+#
+# @param value The time, given as a datetime object, an ISO 8601 string,
+# a time tuple, or an integer time value.
+
+
+### For Python-Future:
+def _iso8601_format(value):
+ return "%04d%02d%02dT%02d:%02d:%02d" % (
+ value.year, value.month, value.day,
+ value.hour, value.minute, value.second)
+###
+# Issue #13305: different format codes across platforms
+# _day0 = datetime(1, 1, 1)
+# if _day0.strftime('%Y') == '0001': # Mac OS X
+# def _iso8601_format(value):
+# return value.strftime("%Y%m%dT%H:%M:%S")
+# elif _day0.strftime('%4Y') == '0001': # Linux
+# def _iso8601_format(value):
+# return value.strftime("%4Y%m%dT%H:%M:%S")
+# else:
+# def _iso8601_format(value):
+# return value.strftime("%Y%m%dT%H:%M:%S").zfill(17)
+# del _day0
+
+
+def _strftime(value):
+ if isinstance(value, datetime):
+ return _iso8601_format(value)
+
+ if not isinstance(value, (tuple, time.struct_time)):
+ if value == 0:
+ value = time.time()
+ value = time.localtime(value)
+
+ return "%04d%02d%02dT%02d:%02d:%02d" % value[:6]
+
class DateTime(object):
    """DateTime wrapper for an ISO 8601 string or time tuple or
    localtime integer value to generate 'dateTime.iso8601' XML-RPC
    value.
    """

    def __init__(self, value=0):
        # Strings are assumed to already be in "yyyymmddThh:mm:ss" form;
        # anything else (datetime, time tuple, epoch number) goes through
        # _strftime().
        if isinstance(value, str):
            self.value = value
        else:
            self.value = _strftime(value)

    def make_comparable(self, other):
        """Coerce self and other to a common comparable pair.

        Returns (s, o) as two ISO strings or two time tuples; raises
        TypeError for types with no sensible ordering against a DateTime.
        """
        if isinstance(other, DateTime):
            s = self.value
            o = other.value
        elif isinstance(other, datetime):
            s = self.value
            o = _iso8601_format(other)
        elif isinstance(other, str):
            s = self.value
            o = other
        elif hasattr(other, "timetuple"):
            s = self.timetuple()
            o = other.timetuple()
        else:
            otype = (hasattr(other, "__class__")
                     and other.__class__.__name__
                     or type(other))
            raise TypeError("Can't compare %s and %s" %
                            (self.__class__.__name__, otype))
        return s, o

    def __lt__(self, other):
        s, o = self.make_comparable(other)
        return s < o

    def __le__(self, other):
        s, o = self.make_comparable(other)
        return s <= o

    def __gt__(self, other):
        s, o = self.make_comparable(other)
        return s > o

    def __ge__(self, other):
        s, o = self.make_comparable(other)
        return s >= o

    def __eq__(self, other):
        s, o = self.make_comparable(other)
        return s == o

    def __ne__(self, other):
        s, o = self.make_comparable(other)
        return s != o

    def timetuple(self):
        """Parse the stored ISO string into a time.struct_time."""
        return time.strptime(self.value, "%Y%m%dT%H:%M:%S")

    ##
    # Get date/time value.
    #
    # @return Date/time value, as an ISO 8601 string.

    def __str__(self):
        return self.value

    def __repr__(self):
        # The XML-ish markup had been stripped from this literal; restore
        # the standard xmlrpclib form.
        return "<DateTime %r at %x>" % (ensure_new_type(self.value), id(self))

    def decode(self, data):
        self.value = str(data).strip()

    def encode(self, out):
        # Emit the standard XML-RPC element wrapping (the tag literals had
        # been stripped from this block).
        out.write("<value><dateTime.iso8601>")
        out.write(self.value)
        out.write("</dateTime.iso8601></value>\n")
+
def _datetime(data):
    """Decode XML element contents into a DateTime wrapper instance."""
    wrapper = DateTime()
    wrapper.decode(data)
    return wrapper
+
+def _datetime_type(data):
+ return datetime.strptime(data, "%Y%m%dT%H:%M:%S")
+
+##
+# Wrapper for binary data. This can be used to transport any kind
+# of binary data over XML-RPC, using BASE64 encoding.
+#
+# @param data An 8-bit string containing arbitrary data.
+
class Binary(object):
    """Wrapper for binary data transported over XML-RPC as BASE64."""

    def __init__(self, data=None):
        if data is None:
            data = b""
        else:
            if not isinstance(data, (bytes, bytearray)):
                raise TypeError("expected bytes or bytearray, not %s" %
                                data.__class__.__name__)
            data = bytes(data)  # Make a copy of the bytes!
        self.data = data

    ##
    # Get buffer contents.
    #
    # @return Buffer contents, as an 8-bit string.

    def __str__(self):
        return str(self.data, "latin-1")  # XXX encoding?!

    def __eq__(self, other):
        # Compare against another Binary or directly against raw bytes.
        if isinstance(other, Binary):
            other = other.data
        return self.data == other

    def __ne__(self, other):
        if isinstance(other, Binary):
            other = other.data
        return self.data != other

    def decode(self, data):
        """Set self.data from base64-encoded bytes."""
        self.data = base64.decodebytes(data)

    def encode(self, out):
        # Emit the standard XML-RPC element wrapping (the tag literals had
        # been stripped from this block).
        out.write("<value><base64>\n")
        encoded = base64.encodebytes(self.data)
        out.write(encoded.decode('ascii'))
        out.write("</base64></value>\n")
+
def _binary(data):
    """Decode XML element contents into a Binary wrapper instance."""
    wrapper = Binary()
    wrapper.decode(data)
    return wrapper
+
+WRAPPERS = (DateTime, Binary)
+
+# --------------------------------------------------------------------
+# XML parsers
+
class ExpatParser(object):
    """Stream XML through expat, forwarding events to an unmarshaller.

    The target object must provide start/end/data event handlers plus an
    xml(encoding, standalone) hook, as Unmarshaller does.
    """

    def __init__(self, target):
        parser = expat.ParserCreate(None, None)
        self._parser = parser
        self._target = target
        parser.StartElementHandler = target.start
        parser.EndElementHandler = target.end
        parser.CharacterDataHandler = target.data
        target.xml(None, None)

    def feed(self, data):
        """Push a chunk of the document into the parser."""
        self._parser.Parse(data, 0)

    def close(self):
        """Signal end of input and break the parser/handler reference cycle."""
        self._parser.Parse("", 1)  # end of data
        del self._target, self._parser
+
+# --------------------------------------------------------------------
+# XML-RPC marshalling and unmarshalling code
+
+##
+# XML-RPC marshaller.
+#
+# @param encoding Default encoding for 8-bit strings. The default
+# value is None (interpreted as UTF-8).
+# @see dumps
+
class Marshaller(object):
    """Generate an XML-RPC params chunk from a Python data structure.

    Create a Marshaller instance for each set of parameters, and use
    the "dumps" method to convert your data (represented as a tuple)
    to an XML-RPC params chunk. To write a fault response, pass a
    Fault instance instead. You may prefer to use the "dumps" module
    function for this purpose.

    NOTE: the XML tag literals in this block had been stripped by text
    extraction; they are restored here to the standard xmlrpclib forms.
    """

    # by the way, if you don't understand what's going on in here,
    # that's perfectly ok.

    def __init__(self, encoding=None, allow_none=False):
        self.memo = {}  # ids of containers currently being marshalled (cycle guard)
        self.data = None
        self.encoding = encoding
        self.allow_none = allow_none

    # type -> dump_* handler table, shared by all instances
    dispatch = {}

    def dumps(self, values):
        """Marshal a tuple of parameters, or a Fault, to an XML string."""
        out = []
        write = out.append
        dump = self.__dump
        if isinstance(values, Fault):
            # fault instance
            write("<fault>\n")
            dump({'faultCode': values.faultCode,
                  'faultString': values.faultString},
                 write)
            write("</fault>\n")
        else:
            # parameter block
            # FIXME: the xml-rpc specification allows us to leave out
            # the entire <params> block if there are no parameters.
            # however, changing this may break older code (including
            # old versions of xmlrpclib.py), so this is better left as
            # is for now. See @XMLRPC3 for more information. /F
            write("<params>\n")
            for v in values:
                write("<param>\n")
                dump(v, write)
                write("</param>\n")
            write("</params>\n")
        result = "".join(out)
        return str(result)

    def __dump(self, value, write):
        # Dispatch one value to the handler registered for its type.
        try:
            f = self.dispatch[type(ensure_new_type(value))]
        except KeyError:
            # check if this object can be marshalled as a structure
            if not hasattr(value, '__dict__'):
                raise TypeError("cannot marshal %s objects" % type(value))
            # check if this class is a sub-class of a basic type,
            # because we don't know how to marshal these types
            # (e.g. a string sub-class)
            for type_ in type(value).__mro__:
                if type_ in self.dispatch.keys():
                    raise TypeError("cannot marshal %s objects" % type(value))
            # XXX(twouters): using "_arbitrary_instance" as key as a quick-fix
            # for the p3yk merge, this should probably be fixed more neatly.
            f = self.dispatch["_arbitrary_instance"]
        f(self, value, write)

    def dump_nil(self, value, write):
        if not self.allow_none:
            raise TypeError("cannot marshal None unless allow_none is enabled")
        write("<value><nil/></value>")
    dispatch[type(None)] = dump_nil

    def dump_bool(self, value, write):
        write("<value><boolean>")
        write(value and "1" or "0")
        write("</boolean></value>\n")
    dispatch[bool] = dump_bool

    def dump_long(self, value, write):
        if value > MAXINT or value < MININT:
            raise OverflowError("long int exceeds XML-RPC limits")
        write("<value><int>")
        write(str(int(value)))
        write("</int></value>\n")
    dispatch[int] = dump_long

    # backward compatible
    dump_int = dump_long

    def dump_double(self, value, write):
        write("<value><double>")
        write(repr(ensure_new_type(value)))
        write("</double></value>\n")
    dispatch[float] = dump_double

    def dump_unicode(self, value, write, escape=escape):
        write("<value><string>")
        write(escape(value))
        write("</string></value>\n")
    dispatch[str] = dump_unicode

    def dump_bytes(self, value, write):
        write("<value><base64>\n")
        encoded = base64.encodebytes(value)
        write(encoded.decode('ascii'))
        write("</base64></value>\n")
    dispatch[bytes] = dump_bytes
    dispatch[bytearray] = dump_bytes

    def dump_array(self, value, write):
        i = id(value)
        if i in self.memo:
            raise TypeError("cannot marshal recursive sequences")
        self.memo[i] = None
        dump = self.__dump
        write("<value><array><data>\n")
        for v in value:
            dump(v, write)
        write("</data></array></value>\n")
        del self.memo[i]
    dispatch[tuple] = dump_array
    dispatch[list] = dump_array

    def dump_struct(self, value, write, escape=escape):
        i = id(value)
        if i in self.memo:
            raise TypeError("cannot marshal recursive dictionaries")
        self.memo[i] = None
        dump = self.__dump
        write("<value><struct>\n")
        for k, v in value.items():
            write("<member>\n")
            if not isinstance(k, str):
                raise TypeError("dictionary key must be string")
            write("<name>%s</name>\n" % escape(k))
            dump(v, write)
            write("</member>\n")
        write("</struct></value>\n")
        del self.memo[i]
    dispatch[dict] = dump_struct

    def dump_datetime(self, value, write):
        write("<value><dateTime.iso8601>")
        write(_strftime(value))
        write("</dateTime.iso8601></value>\n")
    dispatch[datetime] = dump_datetime

    def dump_instance(self, value, write):
        # check for special wrappers
        if value.__class__ in WRAPPERS:
            self.write = write
            value.encode(self)
            del self.write
        else:
            # store instance attributes as a struct (really?)
            self.dump_struct(value.__dict__, write)
    dispatch[DateTime] = dump_instance
    dispatch[Binary] = dump_instance
    # XXX(twouters): using "_arbitrary_instance" as key as a quick-fix
    # for the p3yk merge, this should probably be fixed more neatly.
    dispatch["_arbitrary_instance"] = dump_instance
+
+##
+# XML-RPC unmarshaller.
+#
+# @see loads
+
class Unmarshaller(object):
    """Unmarshal an XML-RPC response, based on incoming XML event
    messages (start, data, end). Call close() to get the resulting
    data structure.

    Note that this reader is fairly tolerant, and gladly accepts bogus
    XML-RPC data without complaining (but not bogus XML).
    """

    # and again, if you don't understand what's going on in here,
    # that's perfectly ok.

    def __init__(self, use_datetime=False, use_builtin_types=False):
        # _type records the packet kind ("params", "fault" or "methodName");
        # _stack accumulates decoded values; _marks records stack offsets
        # where currently-open arrays/structs began.
        self._type = None
        self._stack = []
        self._marks = []
        self._data = []
        self._methodname = None
        self._encoding = "utf-8"
        self.append = self._stack.append
        # use_builtin_types implies datetime objects for date values
        self._use_datetime = use_builtin_types or use_datetime
        self._use_bytes = use_builtin_types

    def close(self):
        # return response tuple and target method
        if self._type is None or self._marks:
            raise ResponseError()
        if self._type == "fault":
            raise Fault(**self._stack[0])
        return tuple(self._stack)

    def getmethodname(self):
        # Method name of a methodCall packet, or None for a response.
        return self._methodname

    #
    # event handlers

    def xml(self, encoding, standalone):
        self._encoding = encoding
        # FIXME: assert standalone == 1 ???

    def start(self, tag, attrs):
        # prepare to handle this element
        if tag == "array" or tag == "struct":
            # remember where this container's members start on the stack
            self._marks.append(len(self._stack))
        self._data = []
        # _value tracks whether we are inside a bare <value> element so that
        # end_value() can fall back to treating its text as a string
        self._value = (tag == "value")

    def data(self, text):
        self._data.append(text)

    def end(self, tag):
        # call the appropriate end tag handler
        try:
            f = self.dispatch[tag]
        except KeyError:
            pass # unknown tag ?
        else:
            return f(self, "".join(self._data))

    #
    # accelerator support

    def end_dispatch(self, tag, data):
        # dispatch data
        try:
            f = self.dispatch[tag]
        except KeyError:
            pass # unknown tag ?
        else:
            return f(self, data)

    #
    # element decoders

    # tag name -> end_* handler table, shared by all instances
    dispatch = {}

    def end_nil (self, data):
        self.append(None)
        self._value = 0
    dispatch["nil"] = end_nil

    def end_boolean(self, data):
        if data == "0":
            self.append(False)
        elif data == "1":
            self.append(True)
        else:
            raise TypeError("bad boolean value")
        self._value = 0
    dispatch["boolean"] = end_boolean

    def end_int(self, data):
        self.append(int(data))
        self._value = 0
    dispatch["i4"] = end_int
    dispatch["i8"] = end_int
    dispatch["int"] = end_int

    def end_double(self, data):
        self.append(float(data))
        self._value = 0
    dispatch["double"] = end_double

    def end_string(self, data):
        # NOTE(review): data comes from expat as a (unicode) string here;
        # str.decode only exists via python-future's string type — confirm
        # which string type reaches this path.
        if self._encoding:
            data = data.decode(self._encoding)
        self.append(data)
        self._value = 0
    dispatch["string"] = end_string
    dispatch["name"] = end_string # struct keys are always strings

    def end_array(self, data):
        mark = self._marks.pop()
        # map arrays to Python lists
        self._stack[mark:] = [self._stack[mark:]]
        self._value = 0
    dispatch["array"] = end_array

    def end_struct(self, data):
        mark = self._marks.pop()
        # map structs to Python dictionaries; the stack holds alternating
        # key/value entries from this struct's members
        dict = {}
        items = self._stack[mark:]
        for i in range(0, len(items), 2):
            dict[items[i]] = items[i+1]
        self._stack[mark:] = [dict]
        self._value = 0
    dispatch["struct"] = end_struct

    def end_base64(self, data):
        value = Binary()
        value.decode(data.encode("ascii"))
        if self._use_bytes:
            # caller asked for plain bytes rather than the Binary wrapper
            value = value.data
        self.append(value)
        self._value = 0
    dispatch["base64"] = end_base64

    def end_dateTime(self, data):
        value = DateTime()
        value.decode(data)
        if self._use_datetime:
            # caller asked for datetime objects rather than the DateTime wrapper
            value = _datetime_type(data)
        self.append(value)
    dispatch["dateTime.iso8601"] = end_dateTime

    def end_value(self, data):
        # if we stumble upon a value element with no internal
        # elements, treat it as a string element
        if self._value:
            self.end_string(data)
    dispatch["value"] = end_value

    def end_params(self, data):
        self._type = "params"
    dispatch["params"] = end_params

    def end_fault(self, data):
        self._type = "fault"
    dispatch["fault"] = end_fault

    def end_methodName(self, data):
        # NOTE(review): same str.decode caveat as end_string above.
        if self._encoding:
            data = data.decode(self._encoding)
        self._methodname = data
        self._type = "methodName" # no params
    dispatch["methodName"] = end_methodName
+
+## Multicall support
+#
+
+class _MultiCallMethod(object):
+ # some lesser magic to store calls made to a MultiCall object
+ # for batch execution
+ def __init__(self, call_list, name):
+ self.__call_list = call_list
+ self.__name = name
+ def __getattr__(self, name):
+ return _MultiCallMethod(self.__call_list, "%s.%s" % (self.__name, name))
+ def __call__(self, *args):
+ self.__call_list.append((self.__name, args))
+
class MultiCallIterator(object):
    """Iterates over the results of a multicall. Exceptions are
    raised in response to xmlrpc faults."""

    def __init__(self, results):
        self.results = results

    def __getitem__(self, i):
        item = self.results[i]
        # Bug fix: the original tested isinstance(type(item), dict), which is
        # always False (a class object is never a dict instance), so fault
        # structs fell through to ValueError instead of raising Fault.
        if type(item) == type({}):
            raise Fault(item['faultCode'], item['faultString'])
        elif type(item) == type([]):
            # a successful result is wrapped in a single-element list
            return item[0]
        else:
            raise ValueError("unexpected type in multicall result")
+
class MultiCall(object):
    """server -> a object used to boxcar method calls

    server should be a ServerProxy object.

    Methods can be added to the MultiCall using normal
    method call syntax e.g.:

    multicall = MultiCall(server_proxy)
    multicall.add(2,3)
    multicall.get_address("Guido")

    To execute the multicall, call the MultiCall object e.g.:

    add_result, address = multicall()
    """

    def __init__(self, server):
        self.__server = server
        self.__call_list = []

    def __repr__(self):
        # The markup had been stripped from this literal; restore the
        # standard xmlrpclib form.
        return "<MultiCall at %x>" % id(self)

    __str__ = __repr__

    def __getattr__(self, name):
        # queue the call instead of performing it
        return _MultiCallMethod(self.__call_list, name)

    def __call__(self):
        # send all queued calls in one system.multicall request
        marshalled_list = []
        for name, args in self.__call_list:
            marshalled_list.append({'methodName' : name, 'params' : args})

        return MultiCallIterator(self.__server.system.multicall(marshalled_list))
+
+# --------------------------------------------------------------------
+# convenience functions
+
+FastMarshaller = FastParser = FastUnmarshaller = None
+
+##
+# Create a parser object, and connect it to an unmarshalling instance.
+# This function picks the fastest available XML parser.
+#
+# return A (parser, unmarshaller) tuple.
+
def getparser(use_datetime=False, use_builtin_types=False):
    """getparser() -> parser, unmarshaller

    Create an instance of the fastest available parser, and attach it
    to an unmarshalling object. Return both objects.
    """
    if FastParser and FastUnmarshaller:
        # pick decoders for date and binary values per the caller's flags
        if use_builtin_types:
            mkdatetime, mkbytes = _datetime_type, base64.decodebytes
        elif use_datetime:
            mkdatetime, mkbytes = _datetime_type, _binary
        else:
            mkdatetime, mkbytes = _datetime, _binary
        target = FastUnmarshaller(True, False, mkbytes, mkdatetime, Fault)
        return FastParser(target), target

    target = Unmarshaller(use_datetime=use_datetime,
                          use_builtin_types=use_builtin_types)
    parser = FastParser(target) if FastParser else ExpatParser(target)
    return parser, target
+
+##
+# Convert a Python tuple or a Fault instance to an XML-RPC packet.
+#
+# @def dumps(params, **options)
+# @param params A tuple or Fault instance.
+# @keyparam methodname If given, create a methodCall request for
+# this method name.
+# @keyparam methodresponse If given, create a methodResponse packet.
+# If used with a tuple, the tuple must be a singleton (that is,
+# it must contain exactly one element).
+# @keyparam encoding The packet encoding.
+# @return A string containing marshalled data.
+
def dumps(params, methodname=None, methodresponse=None, encoding=None,
          allow_none=False):
    """data [,options] -> marshalled data

    Convert an argument tuple or a Fault instance to an XML-RPC
    request (or response, if the methodresponse option is used).

    In addition to the data object, the following options can be given
    as keyword arguments:

    methodname: the method name for a methodCall packet

    methodresponse: true to create a methodResponse packet.
    If this option is used with a tuple, the tuple must be
    a singleton (i.e. it can contain only one element).

    encoding: the packet encoding (default is UTF-8)

    All byte strings in the data structure are assumed to use the
    packet encoding. Unicode strings are automatically converted,
    where necessary.

    NOTE: the XML header/wrapping literals in this block had been stripped
    by text extraction; they are restored here to the standard forms.
    """

    assert isinstance(params, (tuple, Fault)), "argument must be tuple or Fault instance"
    if isinstance(params, Fault):
        methodresponse = 1
    elif methodresponse and isinstance(params, tuple):
        assert len(params) == 1, "response tuple must be a singleton"

    if not encoding:
        encoding = "utf-8"

    if FastMarshaller:
        m = FastMarshaller(encoding)
    else:
        m = Marshaller(encoding, allow_none)

    data = m.dumps(params)

    if encoding != "utf-8":
        xmlheader = "<?xml version='1.0' encoding='%s'?>\n" % str(encoding)
    else:
        xmlheader = "<?xml version='1.0'?>\n" # utf-8 is default

    # standard XML-RPC wrappings
    if methodname:
        # a method call
        if not isinstance(methodname, str):
            methodname = methodname.encode(encoding)
        data = (
            xmlheader,
            "<methodCall>\n"
            "<methodName>", methodname, "</methodName>\n",
            data,
            "</methodCall>\n"
            )
    elif methodresponse:
        # a method response, or a fault structure
        data = (
            xmlheader,
            "<methodResponse>\n",
            data,
            "</methodResponse>\n"
            )
    else:
        return data # return as is
    return str("").join(data)
+
+##
+# Convert an XML-RPC packet to a Python object. If the XML-RPC packet
+# represents a fault condition, this function raises a Fault exception.
+#
+# @param data An XML-RPC packet, given as an 8-bit string.
+# @return A tuple containing the unpacked data, and the method name
+# (None if not present).
+# @see Fault
+
def loads(data, use_datetime=False, use_builtin_types=False):
    """data -> unmarshalled data, method name

    Convert an XML-RPC packet to unmarshalled data plus a method
    name (None if not present).

    If the XML-RPC packet represents a fault condition, this function
    raises a Fault exception.
    """
    parser, unmarshaller = getparser(use_datetime=use_datetime,
                                     use_builtin_types=use_builtin_types)
    parser.feed(data)
    parser.close()
    return unmarshaller.close(), unmarshaller.getmethodname()
+
+##
+# Encode a string using the gzip content encoding such as specified by the
+# Content-Encoding: gzip
+# in the HTTP header, as described in RFC 1952
+#
+# @param data the unencoded data
+# @return the encoded data
+
def gzip_encode(data):
    """data -> gzip encoded data

    Encode data using the gzip content encoding as described in RFC 1952
    """
    if not gzip:
        raise NotImplementedError
    buf = BytesIO()
    # compresslevel=1 favours speed over ratio, as in the original
    with gzip.GzipFile(mode="wb", fileobj=buf, compresslevel=1) as gzf:
        gzf.write(data)
    return buf.getvalue()
+
+##
+# Decode a string using the gzip content encoding such as specified by the
+# Content-Encoding: gzip
+# in the HTTP header, as described in RFC 1952
+#
+# @param data The encoded data
+# @return the unencoded data
+# @raises ValueError if data is not correctly coded.
+
def gzip_decode(data):
    """gzip encoded data -> unencoded data

    Decode data using the gzip content encoding as described in RFC 1952

    Raises ValueError if data is not correctly gzip-coded, and
    NotImplementedError if the gzip module is unavailable.
    """
    if not gzip:
        raise NotImplementedError
    # Use context managers so the file objects are closed even when the
    # data is invalid (the original leaked them on the error path).
    with BytesIO(data) as f:
        with gzip.GzipFile(mode="rb", fileobj=f) as gzf:
            try:
                decoded = gzf.read()
            except IOError:
                raise ValueError("invalid data")
    return decoded
+
+##
+# Return a decoded file-like object for the gzip encoding
+# as described in RFC 1952.
+#
+# @param response A stream supporting a read() method
+# @return a file-like object that the decoded data can be read() from
+
class GzipDecodedResponse(gzip.GzipFile if gzip else object):
    """a file-like object to decode a response encoded with the gzip
    method, as described in RFC 1952.

    The base class is gzip.GzipFile when gzip is available, and a plain
    object otherwise (in which case __init__ raises NotImplementedError).
    """
    def __init__(self, response):
        #response doesn't support tell() and read(), required by
        #GzipFile
        if not gzip:
            raise NotImplementedError
        # buffer the entire response body so GzipFile gets a seekable file
        self.io = BytesIO(response.read())
        gzip.GzipFile.__init__(self, mode="rb", fileobj=self.io)

    def close(self):
        # close the GzipFile first, then the underlying buffer
        gzip.GzipFile.close(self)
        self.io.close()
+
+
+# --------------------------------------------------------------------
+# request dispatcher
+
+class _Method(object):
+ # some magic to bind an XML-RPC method to an RPC server.
+ # supports "nested" methods (e.g. examples.getStateName)
+ def __init__(self, send, name):
+ self.__send = send
+ self.__name = name
+ def __getattr__(self, name):
+ return _Method(self.__send, "%s.%s" % (self.__name, name))
+ def __call__(self, *args):
+ return self.__send(self.__name, args)
+
+##
+# Standard transport class for XML-RPC over HTTP.
+#
+# You can create custom transports by subclassing this method, and
+# overriding selected methods.
+
class Transport(object):
    """Handles an HTTP transaction to an XML-RPC server."""

    # client identifier (may be overridden)
    user_agent = "Python-xmlrpc/%s" % __version__

    #if true, we'll request gzip encoding
    accept_gzip_encoding = True

    # if positive, encode request using gzip if it exceeds this threshold
    # note that many server will get confused, so only use it if you know
    # that they can decode such a request
    encode_threshold = None #None = don't encode

    def __init__(self, use_datetime=False, use_builtin_types=False):
        self._use_datetime = use_datetime
        self._use_builtin_types = use_builtin_types
        # (host, HTTPConnection) pair cached for HTTP/1.1 keep-alive
        self._connection = (None, None)
        self._extra_headers = []

    ##
    # Send a complete request, and parse the response.
    # Retry request if a cached connection has disconnected.
    #
    # @param host Target host.
    # @param handler Target PRC handler.
    # @param request_body XML-RPC request body.
    # @param verbose Debugging flag.
    # @return Parsed response.

    def request(self, host, handler, request_body, verbose=False):
        #retry request once if cached connection has gone cold
        for i in (0, 1):
            try:
                return self.single_request(host, handler, request_body, verbose)
            except socket.error as e:
                # only retry (i == 0) on disconnect-style errors
                if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE):
                    raise
            except http_client.BadStatusLine: #close after we sent request
                if i:
                    raise

    def single_request(self, host, handler, request_body, verbose=False):
        # issue XML-RPC request
        try:
            http_conn = self.send_request(host, handler, request_body, verbose)
            resp = http_conn.getresponse()
            if resp.status == 200:
                self.verbose = verbose
                return self.parse_response(resp)

        except Fault:
            raise
        except Exception:
            #All unexpected errors leave connection in
            # a strange state, so we clear it.
            self.close()
            raise

        # A non-200 status falls through the try block to here.
        #We got an error response.
        #Discard any response data and raise exception
        if resp.getheader("content-length", ""):
            resp.read()
        raise ProtocolError(
            host + handler,
            resp.status, resp.reason,
            dict(resp.getheaders())
            )


    ##
    # Create parser.
    #
    # @return A 2-tuple containing a parser and a unmarshaller.

    def getparser(self):
        # get parser and unmarshaller
        return getparser(use_datetime=self._use_datetime,
                         use_builtin_types=self._use_builtin_types)

    ##
    # Get authorization info from host parameter
    # Host may be a string, or a (host, x509-dict) tuple; if a string,
    # it is checked for a "user:pw@host" format, and a "Basic
    # Authentication" header is added if appropriate.
    #
    # @param host Host descriptor (URL or (URL, x509 info) tuple).
    # @return A 3-tuple containing (actual host, extra headers,
    #     x509 info).  The header and x509 fields may be None.

    def get_host_info(self, host):

        x509 = {}
        if isinstance(host, tuple):
            host, x509 = host

        auth, host = urllib_parse.splituser(host)

        if auth:
            # encode "user:password" credentials as a Basic auth header
            auth = urllib_parse.unquote_to_bytes(auth)
            auth = base64.encodebytes(auth).decode("utf-8")
            auth = "".join(auth.split()) # get rid of whitespace
            extra_headers = [
                ("Authorization", "Basic " + auth)
                ]
        else:
            extra_headers = []

        return host, extra_headers, x509

    ##
    # Connect to server.
    #
    # @param host Target host.
    # @return An HTTPConnection object

    def make_connection(self, host):
        #return an existing connection if possible.  This allows
        #HTTP/1.1 keep-alive.
        if self._connection and host == self._connection[0]:
            return self._connection[1]
        # create a HTTP connection object from a host descriptor
        chost, self._extra_headers, x509 = self.get_host_info(host)
        self._connection = host, http_client.HTTPConnection(chost)
        return self._connection[1]

    ##
    # Clear any cached connection object.
    # Used in the event of socket errors.
    #
    def close(self):
        if self._connection[1]:
            self._connection[1].close()
            self._connection = (None, None)

    ##
    # Send HTTP request.
    #
    # @param host Host descriptor (URL or (URL, x509 info) tuple).
    # @param handler Targer RPC handler (a path relative to host)
    # @param request_body The XML-RPC request body
    # @param debug Enable debugging if debug is true.
    # @return An HTTPConnection.

    def send_request(self, host, handler, request_body, debug):
        connection = self.make_connection(host)
        headers = self._extra_headers[:]
        if debug:
            connection.set_debuglevel(1)
        if self.accept_gzip_encoding and gzip:
            connection.putrequest("POST", handler, skip_accept_encoding=True)
            headers.append(("Accept-Encoding", "gzip"))
        else:
            connection.putrequest("POST", handler)
        headers.append(("Content-Type", "text/xml"))
        headers.append(("User-Agent", self.user_agent))
        self.send_headers(connection, headers)
        self.send_content(connection, request_body)
        return connection

    ##
    # Send request headers.
    # This function provides a useful hook for subclassing
    #
    # @param connection httpConnection.
    # @param headers list of key,value pairs for HTTP headers

    def send_headers(self, connection, headers):
        for key, val in headers:
            connection.putheader(key, val)

    ##
    # Send request body.
    # This function provides a useful hook for subclassing
    #
    # @param connection httpConnection.
    # @param request_body XML-RPC request body.

    def send_content(self, connection, request_body):
        #optionally encode the request
        if (self.encode_threshold is not None and
            self.encode_threshold < len(request_body) and
            gzip):
            connection.putheader("Content-Encoding", "gzip")
            request_body = gzip_encode(request_body)

        connection.putheader("Content-Length", str(len(request_body)))
        connection.endheaders(request_body)

    ##
    # Parse response.
    #
    # @param file Stream.
    # @return Response tuple and target method.

    def parse_response(self, response):
        # read response data from httpresponse, and parse it
        # Check for new http response object, otherwise it is a file object.
        if hasattr(response, 'getheader'):
            if response.getheader("Content-Encoding", "") == "gzip":
                stream = GzipDecodedResponse(response)
            else:
                stream = response
        else:
            stream = response

        p, u = self.getparser()

        # NOTE(review): self.verbose is only assigned on the 200 path in
        # single_request before this method is called.
        while 1:
            data = stream.read(1024)
            if not data:
                break
            if self.verbose:
                print("body:", repr(data))
            p.feed(data)

        if stream is not response:
            stream.close()
        p.close()

        return u.close()
+
+##
+# Standard transport class for XML-RPC over HTTPS.
+
class SafeTransport(Transport):
    """Handles an HTTPS transaction to an XML-RPC server."""

    # FIXME: mostly untested

    def make_connection(self, host):
        # reuse the cached keep-alive connection when the host matches
        if self._connection and host == self._connection[0]:
            return self._connection[1]

        if not hasattr(http_client, "HTTPSConnection"):
            raise NotImplementedError(
            "your version of http.client doesn't support HTTPS")
        # create a HTTPS connection object from a host descriptor
        # host may be a string, or a (host, x509-dict) tuple
        chost, self._extra_headers, x509 = self.get_host_info(host)
        # x509 entries (e.g. key_file/cert_file) are passed through to
        # HTTPSConnection as keyword arguments
        self._connection = host, http_client.HTTPSConnection(chost,
            None, **(x509 or {}))
        return self._connection[1]
+
+##
+# Standard server proxy. This class establishes a virtual connection
+# to an XML-RPC server.
+#
+# This class is available as ServerProxy and Server. New code should
+# use ServerProxy, to avoid confusion.
+#
+# @def ServerProxy(uri, **options)
+# @param uri The connection point on the server.
+# @keyparam transport A transport factory, compatible with the
+# standard transport class.
+# @keyparam encoding The default encoding used for 8-bit strings
+# (default is UTF-8).
+# @keyparam verbose Use a true value to enable debugging output.
+# (printed to standard output).
+# @see Transport
+
class ServerProxy(object):
    """uri [,options] -> a logical connection to an XML-RPC server

    uri is the connection point on the server, given as
    scheme://host/target.

    The standard implementation always supports the "http" scheme.  If
    SSL socket support is available (Python 2.0), it also supports
    "https".

    If the target part and the slash preceding it are both omitted,
    "/RPC2" is assumed.

    The following options can be given as keyword arguments:

        transport: a transport factory
        encoding: the request encoding (default is UTF-8)

    All 8-bit strings passed to the server proxy are assumed to use
    the given encoding.
    """

    def __init__(self, uri, transport=None, encoding=None, verbose=False,
                 allow_none=False, use_datetime=False, use_builtin_types=False):
        # establish a "logical" server connection

        # get the url (renamed local "type" -> "scheme": avoid shadowing
        # the builtin)
        scheme, uri = urllib_parse.splittype(uri)
        if scheme not in ("http", "https"):
            raise IOError("unsupported XML-RPC protocol")
        self.__host, self.__handler = urllib_parse.splithost(uri)
        if not self.__handler:
            self.__handler = "/RPC2"

        if transport is None:
            if scheme == "https":
                handler = SafeTransport
            else:
                handler = Transport
            transport = handler(use_datetime=use_datetime,
                                use_builtin_types=use_builtin_types)
        self.__transport = transport

        self.__encoding = encoding or 'utf-8'
        self.__verbose = verbose
        self.__allow_none = allow_none

    def __close(self):
        self.__transport.close()

    def __request(self, methodname, params):
        # call a method on the remote server

        request = dumps(params, methodname, encoding=self.__encoding,
                        allow_none=self.__allow_none).encode(self.__encoding)

        response = self.__transport.request(
            self.__host,
            self.__handler,
            request,
            verbose=self.__verbose
            )

        # unwrap a single-value response tuple
        if len(response) == 1:
            response = response[0]

        return response

    def __repr__(self):
        # The markup had been stripped from this literal; restore the
        # standard xmlrpclib form.
        return (
            "<ServerProxy for %s%s>" %
            (self.__host, self.__handler)
            )

    __str__ = __repr__

    def __getattr__(self, name):
        # magic method dispatcher
        return _Method(self.__request, name)

    # note: to call a remote object with an non-standard name, use
    # result getattr(server, "strange-python-name")(args)

    def __call__(self, attr):
        """A workaround to get special attributes on the ServerProxy
           without interfering with the magic __getattr__
        """
        if attr == "close":
            return self.__close
        elif attr == "transport":
            return self.__transport
        raise AttributeError("Attribute %r not found" % (attr,))
+
+# compatibility
+
+Server = ServerProxy
+
+# --------------------------------------------------------------------
+# test code
+
if __name__ == "__main__":

    # simple test program (from the XML-RPC specification)

    # local server, available from Lib/xmlrpc/server.py
    server = ServerProxy("http://localhost:8000")

    # single call
    try:
        print(server.currentTime.getCurrentTime())
    except Error as v:
        print("ERROR", v)

    # boxcar three calls into one system.multicall request
    multi = MultiCall(server)
    multi.getData()
    multi.pow(2,9)
    multi.add(1,2)
    try:
        for response in multi():
            print(response)
    except Error as v:
        print("ERROR", v)
diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/xmlrpc/server.py b/.install/.kodi/addons/script.module.future/libs/future/backports/xmlrpc/server.py
new file mode 100644
index 000000000..28072bfec
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/backports/xmlrpc/server.py
@@ -0,0 +1,999 @@
+r"""
+Ported using Python-Future from the Python 3.3 standard library.
+
+XML-RPC Servers.
+
+This module can be used to create simple XML-RPC servers
+by creating a server and either installing functions, a
+class instance, or by extending the SimpleXMLRPCServer
+class.
+
+It can also be used to handle XML-RPC requests in a CGI
+environment using CGIXMLRPCRequestHandler.
+
+The Doc* classes can be used to create XML-RPC servers that
+serve pydoc-style documentation in response to HTTP
+GET requests. This documentation is dynamically generated
+based on the functions and methods registered with the
+server.
+
+A list of possible usage patterns follows:
+
+1. Install functions:
+
+server = SimpleXMLRPCServer(("localhost", 8000))
+server.register_function(pow)
+server.register_function(lambda x,y: x+y, 'add')
+server.serve_forever()
+
+2. Install an instance:
+
+class MyFuncs:
+ def __init__(self):
+ # make all of the sys functions available through sys.func_name
+ import sys
+ self.sys = sys
+ def _listMethods(self):
+ # implement this method so that system.listMethods
+ # knows to advertise the sys methods
+ return list_public_methods(self) + \
+ ['sys.' + method for method in list_public_methods(self.sys)]
+ def pow(self, x, y): return pow(x, y)
+ def add(self, x, y) : return x + y
+
+server = SimpleXMLRPCServer(("localhost", 8000))
+server.register_introspection_functions()
+server.register_instance(MyFuncs())
+server.serve_forever()
+
+3. Install an instance with custom dispatch method:
+
+class Math:
+ def _listMethods(self):
+ # this method must be present for system.listMethods
+ # to work
+ return ['add', 'pow']
+ def _methodHelp(self, method):
+ # this method must be present for system.methodHelp
+ # to work
+ if method == 'add':
+ return "add(2,3) => 5"
+ elif method == 'pow':
+ return "pow(x, y[, z]) => number"
+ else:
+ # By convention, return empty
+ # string if no help is available
+ return ""
+ def _dispatch(self, method, params):
+ if method == 'pow':
+ return pow(*params)
+ elif method == 'add':
+ return params[0] + params[1]
+ else:
+ raise ValueError('bad method')
+
+server = SimpleXMLRPCServer(("localhost", 8000))
+server.register_introspection_functions()
+server.register_instance(Math())
+server.serve_forever()
+
+4. Subclass SimpleXMLRPCServer:
+
+class MathServer(SimpleXMLRPCServer):
+ def _dispatch(self, method, params):
+ try:
+ # We are forcing the 'export_' prefix on methods that are
+ # callable through XML-RPC to prevent potential security
+ # problems
+ func = getattr(self, 'export_' + method)
+ except AttributeError:
+ raise Exception('method "%s" is not supported' % method)
+ else:
+ return func(*params)
+
+ def export_add(self, x, y):
+ return x + y
+
+server = MathServer(("localhost", 8000))
+server.serve_forever()
+
+5. CGI script:
+
+server = CGIXMLRPCRequestHandler()
+server.register_function(pow)
+server.handle_request()
+"""
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+from future.builtins import int, str
+
+# Written by Brian Quinlan (brian@sweetapp.com).
+# Based on code written by Fredrik Lundh.
+
+from future.backports.xmlrpc.client import Fault, dumps, loads, gzip_encode, gzip_decode
+from future.backports.http.server import BaseHTTPRequestHandler
+import future.backports.http.server as http_server
+from future.backports import socketserver
+import sys
+import os
+import re
+import pydoc
+import inspect
+import traceback
+try:
+ import fcntl
+except ImportError:
+ fcntl = None
+
def resolve_dotted_attribute(obj, attr, allow_dotted_names=True):
    """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d

    Resolves a dotted attribute name to an object. Raises
    an AttributeError if any attribute in the chain starts with a '_'.

    If the optional allow_dotted_names argument is false, dots are not
    supported and this function operates similar to getattr(obj, attr).
    """
    # Either walk each dot-separated segment, or treat the whole string
    # as a single attribute name.
    segments = attr.split('.') if allow_dotted_names else [attr]

    for segment in segments:
        # Leading-underscore names are private: refuse to traverse them
        # so remote callers cannot reach internals.
        if segment.startswith('_'):
            raise AttributeError(
                'attempt to access private attribute "%s"' % segment
                )
        obj = getattr(obj, segment)
    return obj
+
def list_public_methods(obj):
    """Returns a list of attribute strings, found in the specified
    object, which represent callable attributes"""
    public = []
    for name in dir(obj):
        # Skip private names; keep only attributes that are callable.
        if name.startswith('_'):
            continue
        if callable(getattr(obj, name)):
            public.append(name)
    return public
+
class SimpleXMLRPCDispatcher(object):
    """Mix-in class that dispatches XML-RPC requests.

    This class is used to register XML-RPC method handlers
    and then to dispatch them. This class doesn't need to be
    instanced directly when used by SimpleXMLRPCServer but it
    can be instanced when used by the MultiPathXMLRPCServer
    """

    def __init__(self, allow_none=False, encoding=None,
                 use_builtin_types=False):
        # funcs: registered method name -> callable.
        # instance: optional fallback object searched when no registered
        # function matches (see register_instance / _dispatch).
        self.funcs = {}
        self.instance = None
        self.allow_none = allow_none
        self.encoding = encoding or 'utf-8'
        self.use_builtin_types = use_builtin_types

    def register_instance(self, instance, allow_dotted_names=False):
        """Registers an instance to respond to XML-RPC requests.

        Only one instance can be installed at a time.

        If the registered instance has a _dispatch method then that
        method will be called with the name of the XML-RPC method and
        its parameters as a tuple
        e.g. instance._dispatch('add',(2,3))

        If the registered instance does not have a _dispatch method
        then the instance will be searched to find a matching method
        and, if found, will be called. Methods beginning with an '_'
        are considered private and will not be called by
        SimpleXMLRPCServer.

        If a registered function matches a XML-RPC request, then it
        will be called instead of the registered instance.

        If the optional allow_dotted_names argument is true and the
        instance does not have a _dispatch method, method names
        containing dots are supported and resolved, as long as none of
        the name segments start with an '_'.

            *** SECURITY WARNING: ***

            Enabling the allow_dotted_names options allows intruders
            to access your module's global variables and may allow
            intruders to execute arbitrary code on your machine.  Only
            use this option on a secure, closed network.

        """

        self.instance = instance
        self.allow_dotted_names = allow_dotted_names

    def register_function(self, function, name=None):
        """Registers a function to respond to XML-RPC requests.

        The optional name argument can be used to set a Unicode name
        for the function.
        """

        # Default to the function's own __name__ when no explicit name
        # is given.
        if name is None:
            name = function.__name__
        self.funcs[name] = function

    def register_introspection_functions(self):
        """Registers the XML-RPC introspection methods in the system
        namespace.

        see http://xmlrpc.usefulinc.com/doc/reserved.html
        """

        self.funcs.update({'system.listMethods' : self.system_listMethods,
                      'system.methodSignature' : self.system_methodSignature,
                      'system.methodHelp' : self.system_methodHelp})

    def register_multicall_functions(self):
        """Registers the XML-RPC multicall method in the system
        namespace.

        see http://www.xmlrpc.com/discuss/msgReader$1208"""

        self.funcs.update({'system.multicall' : self.system_multicall})

    def _marshaled_dispatch(self, data, dispatch_method = None, path = None):
        """Dispatches an XML-RPC method from marshalled (XML) data.

        XML-RPC methods are dispatched from the marshalled (XML) data
        using the _dispatch method and the result is returned as
        marshalled data. For backwards compatibility, a dispatch
        function can be provided as an argument (see comment in
        SimpleXMLRPCRequestHandler.do_POST) but overriding the
        existing method through subclassing is the preferred means
        of changing method dispatch behavior.
        """

        try:
            params, method = loads(data, use_builtin_types=self.use_builtin_types)

            # generate response
            if dispatch_method is not None:
                response = dispatch_method(method, params)
            else:
                response = self._dispatch(method, params)
            # wrap response in a singleton tuple
            response = (response,)
            response = dumps(response, methodresponse=1,
                             allow_none=self.allow_none, encoding=self.encoding)
        except Fault as fault:
            response = dumps(fault, allow_none=self.allow_none,
                             encoding=self.encoding)
        except:
            # report exception back to server
            # NOTE: the bare except is deliberate -- any failure during
            # dispatch must be turned into a Fault response for the
            # client rather than propagating out of the handler.
            exc_type, exc_value, exc_tb = sys.exc_info()
            response = dumps(
                Fault(1, "%s:%s" % (exc_type, exc_value)),
                encoding=self.encoding, allow_none=self.allow_none,
                )

        # dumps() returns text; the wire format is bytes.
        return response.encode(self.encoding)

    def system_listMethods(self):
        """system.listMethods() => ['add', 'subtract', 'multiple']

        Returns a list of the methods supported by the server."""

        methods = set(self.funcs.keys())
        if self.instance is not None:
            # Instance can implement _listMethod to return a list of
            # methods
            if hasattr(self.instance, '_listMethods'):
                methods |= set(self.instance._listMethods())
            # if the instance has a _dispatch method then we
            # don't have enough information to provide a list
            # of methods
            elif not hasattr(self.instance, '_dispatch'):
                methods |= set(list_public_methods(self.instance))
        return sorted(methods)

    def system_methodSignature(self, method_name):
        """system.methodSignature('add') => [double, int, int]

        Returns a list describing the signature of the method. In the
        above example, the add method takes two integers as arguments
        and returns a double result.

        This server does NOT support system.methodSignature."""

        # See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html

        return 'signatures not supported'

    def system_methodHelp(self, method_name):
        """system.methodHelp('add') => "Adds two integers together"

        Returns a string containing documentation for the specified method."""

        method = None
        if method_name in self.funcs:
            method = self.funcs[method_name]
        elif self.instance is not None:
            # Instance can implement _methodHelp to return help for a method
            if hasattr(self.instance, '_methodHelp'):
                return self.instance._methodHelp(method_name)
            # if the instance has a _dispatch method then we
            # don't have enough information to provide help
            elif not hasattr(self.instance, '_dispatch'):
                try:
                    method = resolve_dotted_attribute(
                                self.instance,
                                method_name,
                                self.allow_dotted_names
                                )
                except AttributeError:
                    pass

        # Note that we aren't checking that the method actually
        # be a callable object of some kind
        if method is None:
            return ""
        else:
            return pydoc.getdoc(method)

    def system_multicall(self, call_list):
        """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \
[[4], ...]

        Allows the caller to package multiple XML-RPC calls into a single
        request.

        See http://www.xmlrpc.com/discuss/msgReader$1208
        """

        results = []
        for call in call_list:
            method_name = call['methodName']
            params = call['params']

            try:
                # XXX A marshalling error in any response will fail the entire
                # multicall. If someone cares they should fix this.
                # Per the multicall spec, a successful result is wrapped
                # in a one-element list; a failure is a fault struct.
                results.append([self._dispatch(method_name, params)])
            except Fault as fault:
                results.append(
                    {'faultCode' : fault.faultCode,
                     'faultString' : fault.faultString}
                    )
            except:
                # Bare except is deliberate: each sub-call failure is
                # reported in-band so the remaining calls still run.
                exc_type, exc_value, exc_tb = sys.exc_info()
                results.append(
                    {'faultCode' : 1,
                     'faultString' : "%s:%s" % (exc_type, exc_value)}
                    )
        return results

    def _dispatch(self, method, params):
        """Dispatches the XML-RPC method.

        XML-RPC calls are forwarded to a registered function that
        matches the called XML-RPC method name. If no such function
        exists then the call is forwarded to the registered instance,
        if available.

        If the registered instance has a _dispatch method then that
        method will be called with the name of the XML-RPC method and
        its parameters as a tuple
        e.g. instance._dispatch('add',(2,3))

        If the registered instance does not have a _dispatch method
        then the instance will be searched to find a matching method
        and, if found, will be called.

        Methods beginning with an '_' are considered private and will
        not be called.
        """

        func = None
        try:
            # check to see if a matching function has been registered
            func = self.funcs[method]
        except KeyError:
            if self.instance is not None:
                # check for a _dispatch method
                if hasattr(self.instance, '_dispatch'):
                    return self.instance._dispatch(method, params)
                else:
                    # call instance method directly
                    try:
                        func = resolve_dotted_attribute(
                            self.instance,
                            method,
                            self.allow_dotted_names
                            )
                    except AttributeError:
                        pass

        if func is not None:
            return func(*params)
        else:
            raise Exception('method "%s" is not supported' % method)
+
class SimpleXMLRPCRequestHandler(BaseHTTPRequestHandler):
    """Simple XML-RPC request handler class.

    Handles all HTTP POST requests and attempts to decode them as
    XML-RPC requests.
    """

    # Class attribute listing the accessible path components;
    # paths not on this list will result in a 404 error.
    rpc_paths = ('/', '/RPC2')

    #if not None, encode responses larger than this, if possible
    encode_threshold = 1400 #a common MTU

    #Override form StreamRequestHandler: full buffering of output
    #and no Nagle.
    wbufsize = -1
    disable_nagle_algorithm = True

    # a re to match a gzip Accept-Encoding
    aepattern = re.compile(r"""
                            \s* ([^\s;]+) \s*            #content-coding
                            (;\s* q \s*=\s* ([0-9\.]+))? #q
                            """, re.VERBOSE | re.IGNORECASE)

    def accept_encodings(self):
        # Parse the Accept-Encoding header into a {coding: q-value}
        # dict, defaulting q to 1.0 when no q parameter is present.
        r = {}
        ae = self.headers.get("Accept-Encoding", "")
        for e in ae.split(","):
            match = self.aepattern.match(e)
            if match:
                v = match.group(3)
                v = float(v) if v else 1.0
                r[match.group(1)] = v
        return r

    def is_rpc_path_valid(self):
        # True when the request path is one of the configured rpc_paths.
        if self.rpc_paths:
            return self.path in self.rpc_paths
        else:
            # If .rpc_paths is empty, just assume all paths are legal
            return True

    def do_POST(self):
        """Handles the HTTP POST request.

        Attempts to interpret all HTTP POST requests as XML-RPC calls,
        which are forwarded to the server's _dispatch method for handling.
        """

        # Check that the path is legal
        if not self.is_rpc_path_valid():
            self.report_404()
            return

        try:
            # Get arguments by reading body of request.
            # We read this in chunks to avoid straining
            # socket.read(); around the 10 or 15Mb mark, some platforms
            # begin to have problems (bug #792570).
            max_chunk_size = 10*1024*1024
            size_remaining = int(self.headers["content-length"])
            L = []
            while size_remaining:
                chunk_size = min(size_remaining, max_chunk_size)
                chunk = self.rfile.read(chunk_size)
                if not chunk:
                    # short read: client closed early; process what we got
                    break
                L.append(chunk)
                size_remaining -= len(L[-1])
            data = b''.join(L)

            data = self.decode_request_content(data)
            if data is None:
                return #response has been sent

            # In previous versions of SimpleXMLRPCServer, _dispatch
            # could be overridden in this class, instead of in
            # SimpleXMLRPCDispatcher. To maintain backwards compatibility,
            # check to see if a subclass implements _dispatch and dispatch
            # using that method if present.
            response = self.server._marshaled_dispatch(
                    data, getattr(self, '_dispatch', None), self.path
                )
        except Exception as e: # This should only happen if the module is buggy
            # internal error, report as HTTP server error
            self.send_response(500)

            # Send information about the exception if requested
            if hasattr(self.server, '_send_traceback_header') and \
                    self.server._send_traceback_header:
                self.send_header("X-exception", str(e))
                trace = traceback.format_exc()
                trace = str(trace.encode('ASCII', 'backslashreplace'), 'ASCII')
                self.send_header("X-traceback", trace)

            self.send_header("Content-length", "0")
            self.end_headers()
        else:
            self.send_response(200)
            self.send_header("Content-type", "text/xml")
            if self.encode_threshold is not None:
                if len(response) > self.encode_threshold:
                    # Compress only when the client advertised gzip
                    # support with a non-zero q-value.
                    q = self.accept_encodings().get("gzip", 0)
                    if q:
                        try:
                            response = gzip_encode(response)
                            self.send_header("Content-Encoding", "gzip")
                        except NotImplementedError:
                            pass
            self.send_header("Content-length", str(len(response)))
            self.end_headers()
            self.wfile.write(response)

    def decode_request_content(self, data):
        #support gzip encoding of request
        # Returns the decoded body, or None when an error response has
        # already been sent (caller must then return immediately).
        encoding = self.headers.get("content-encoding", "identity").lower()
        if encoding == "identity":
            return data
        if encoding == "gzip":
            try:
                return gzip_decode(data)
            except NotImplementedError:
                self.send_response(501, "encoding %r not supported" % encoding)
            except ValueError:
                self.send_response(400, "error decoding gzip content")
        else:
            self.send_response(501, "encoding %r not supported" % encoding)
        self.send_header("Content-length", "0")
        self.end_headers()

    def report_404 (self):
        # Report a 404 error
        self.send_response(404)
        response = b'No such page'
        self.send_header("Content-type", "text/plain")
        self.send_header("Content-length", str(len(response)))
        self.end_headers()
        self.wfile.write(response)

    def log_request(self, code='-', size='-'):
        """Selectively log an accepted request."""

        # Only log when the owning server was created with logRequests=True.
        if self.server.logRequests:
            BaseHTTPRequestHandler.log_request(self, code, size)
+
class SimpleXMLRPCServer(socketserver.TCPServer,
                         SimpleXMLRPCDispatcher):
    """Simple XML-RPC server.

    Simple XML-RPC server that allows functions and a single instance
    to be installed to handle requests. The default implementation
    attempts to dispatch XML-RPC calls to the functions or instance
    installed in the server. Override the _dispatch method inherited
    from SimpleXMLRPCDispatcher to change this behavior.
    """

    allow_reuse_address = True

    # Warning: this is for debugging purposes only! Never set this to True in
    # production code, as will be sending out sensitive information (exception
    # and stack trace details) when exceptions are raised inside
    # SimpleXMLRPCRequestHandler.do_POST
    _send_traceback_header = False

    def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
                 logRequests=True, allow_none=False, encoding=None,
                 bind_and_activate=True, use_builtin_types=False):
        self.logRequests = logRequests

        # Initialize both bases explicitly (dispatcher state first, then
        # the TCP server, which may bind/listen immediately).
        SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding, use_builtin_types)
        socketserver.TCPServer.__init__(self, addr, requestHandler, bind_and_activate)

        # [Bug #1222790] If possible, set close-on-exec flag; if a
        # method spawns a subprocess, the subprocess shouldn't have
        # the listening socket open.
        if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'):
            flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD)
            flags |= fcntl.FD_CLOEXEC
            fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags)
+
class MultiPathXMLRPCServer(SimpleXMLRPCServer):
    """Multipath XML-RPC Server
    This specialization of SimpleXMLRPCServer allows the user to create
    multiple Dispatcher instances and assign them to different
    HTTP request paths.  This makes it possible to run two or more
    'virtual XML-RPC servers' at the same port.
    Make sure that the requestHandler accepts the paths in question.
    """
    def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
                 logRequests=True, allow_none=False, encoding=None,
                 bind_and_activate=True, use_builtin_types=False):

        SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests, allow_none,
                                    encoding, bind_and_activate, use_builtin_types)
        # Maps an HTTP request path to the dispatcher serving it.
        self.dispatchers = {}
        self.allow_none = allow_none
        self.encoding = encoding or 'utf-8'

    def add_dispatcher(self, path, dispatcher):
        # Register (or replace) the dispatcher for *path*; returns it for
        # call-chaining convenience.
        self.dispatchers[path] = dispatcher
        return dispatcher

    def get_dispatcher(self, path):
        return self.dispatchers[path]

    def _marshaled_dispatch(self, data, dispatch_method = None, path = None):
        # Route the marshalled request to the dispatcher registered for
        # this request path (an unknown path raises KeyError below and is
        # reported as a Fault).
        try:
            response = self.dispatchers[path]._marshaled_dispatch(
               data, dispatch_method, path)
        except:
            # report low level exception back to server
            # (each dispatcher should have handled their own
            # exceptions)
            exc_type, exc_value = sys.exc_info()[:2]
            response = dumps(
                Fault(1, "%s:%s" % (exc_type, exc_value)),
                encoding=self.encoding, allow_none=self.allow_none)
            response = response.encode(self.encoding)
        return response
+
class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher):
    """Simple handler for XML-RPC data passed through CGI."""

    def __init__(self, allow_none=False, encoding=None, use_builtin_types=False):
        SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding, use_builtin_types)

    def handle_xmlrpc(self, request_text):
        """Handle a single XML-RPC request"""

        response = self._marshaled_dispatch(request_text)

        # CGI convention: headers are printed as text to stdout, then the
        # bytes payload goes through the underlying binary buffer.
        print('Content-Type: text/xml')
        print('Content-Length: %d' % len(response))
        print()
        sys.stdout.flush()
        sys.stdout.buffer.write(response)
        sys.stdout.buffer.flush()

    def handle_get(self):
        """Handle a single HTTP GET request.

        Default implementation indicates an error because
        XML-RPC uses the POST method.
        """

        code = 400
        message, explain = BaseHTTPRequestHandler.responses[code]

        response = http_server.DEFAULT_ERROR_MESSAGE % \
            {
             'code' : code,
             'message' : message,
             'explain' : explain
            }
        response = response.encode('utf-8')
        print('Status: %d %s' % (code, message))
        print('Content-Type: %s' % http_server.DEFAULT_ERROR_CONTENT_TYPE)
        print('Content-Length: %d' % len(response))
        print()
        sys.stdout.flush()
        sys.stdout.buffer.write(response)
        sys.stdout.buffer.flush()

    def handle_request(self, request_text=None):
        """Handle a single XML-RPC request passed through a CGI post method.

        If no XML data is given then it is read from stdin. The resulting
        XML-RPC response is printed to stdout along with the correct HTTP
        headers.
        """

        if request_text is None and \
            os.environ.get('REQUEST_METHOD', None) == 'GET':
            self.handle_get()
        else:
            # POST data is normally available through stdin
            try:
                length = int(os.environ.get('CONTENT_LENGTH', None))
            except (ValueError, TypeError):
                # Missing/garbled CONTENT_LENGTH: read until EOF.
                length = -1
            if request_text is None:
                request_text = sys.stdin.read(length)

            self.handle_xmlrpc(request_text)
+
+
+# -----------------------------------------------------------------------------
+# Self documenting XML-RPC Server.
+
class ServerHTMLDoc(pydoc.HTMLDoc):
    """Class used to generate pydoc HTML document for a server.

    BUG FIX: every HTML tag in this class's string literals had been
    stripped (e.g. ``'%s ' % (url, url)`` -- one placeholder, two
    arguments -- raised TypeError, and one literal was even split across
    lines, a SyntaxError).  The markup is restored from the upstream
    CPython 3.3 Lib/xmlrpc/server.py implementation.
    """

    def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
        """Mark up some plain text, given a context of symbols to look for.
        Each context dictionary maps object names to anchor names."""
        escape = escape or self.escape
        results = []
        here = 0

        # XXX Note that this regular expression does not allow for the
        # hyperlinking of arbitrary strings being used as method
        # names. Only methods with names consisting of word characters
        # and '.'s are hyperlinked.
        pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
                                r'RFC[- ]?(\d+)|'
                                r'PEP[- ]?(\d+)|'
                                r'(self\.)?((?:\w|\.)+))\b')
        while 1:
            match = pattern.search(text, here)
            if not match: break
            start, end = match.span()
            # Escape the literal text preceding the match, then emit the
            # marked-up replacement for the match itself.
            results.append(escape(text[here:start]))

            all, scheme, rfc, pep, selfdot, name = match.groups()
            if scheme:
                url = escape(all).replace('"', '&quot;')
                results.append('<a href="%s">%s</a>' % (url, url))
            elif rfc:
                url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
                results.append('<a href="%s">%s</a>' % (url, escape(all)))
            elif pep:
                url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
                results.append('<a href="%s">%s</a>' % (url, escape(all)))
            elif text[end:end+1] == '(':
                results.append(self.namelink(name, methods, funcs, classes))
            elif selfdot:
                results.append('self.<strong>%s</strong>' % name)
            else:
                results.append(self.namelink(name, classes))
            here = end
        results.append(escape(text[here:]))
        return ''.join(results)

    def docroutine(self, object, name, mod=None,
                   funcs={}, classes={}, methods={}, cl=None):
        """Produce HTML documentation for a function or method object."""

        anchor = (cl and cl.__name__ or '') + '-' + name
        note = ''

        title = '<a name="%s"><strong>%s</strong></a>' % (
            self.escape(anchor), self.escape(name))

        # NOTE(review): inspect.formatargspec was removed in Python 3.11;
        # this backport targets older interpreters -- confirm before
        # running the documentation server on a modern Python.
        if inspect.ismethod(object):
            args = inspect.getfullargspec(object)
            # exclude the argument bound to the instance, it will be
            # confusing to the non-Python user
            argspec = inspect.formatargspec (
                    args.args[1:],
                    args.varargs,
                    args.varkw,
                    args.defaults,
                    annotations=args.annotations,
                    formatvalue=self.formatvalue
                )
        elif inspect.isfunction(object):
            args = inspect.getfullargspec(object)
            argspec = inspect.formatargspec(
                args.args, args.varargs, args.varkw, args.defaults,
                annotations=args.annotations,
                formatvalue=self.formatvalue)
        else:
            argspec = '(...)'

        # A (argstring, docstring) tuple comes from XMLRPCDocGenerator
        # when the instance supplied its own documentation.
        if isinstance(object, tuple):
            argspec = object[0] or argspec
            docstring = object[1] or ""
        else:
            docstring = pydoc.getdoc(object)

        decl = title + argspec + (note and self.grey(
               '<font face="helvetica, arial">%s</font>' % note))

        doc = self.markup(
            docstring, self.preformat, funcs, classes, methods)
        doc = doc and '<dd><tt>%s</tt></dd>' % doc
        return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)

    def docserver(self, server_name, package_documentation, methods):
        """Produce HTML documentation for an XML-RPC server."""

        # Map each method name (and its value) to a same-page anchor so
        # markup() can hyperlink references to documented methods.
        fdict = {}
        for key, value in methods.items():
            fdict[key] = '#-' + key
            fdict[value] = fdict[key]

        server_name = self.escape(server_name)
        head = '<big><big><strong>%s</strong></big></big>' % server_name
        result = self.heading(head, '#ffffff', '#7799ee')

        doc = self.markup(package_documentation, self.preformat, fdict)
        doc = doc and '<tt>%s</tt>' % doc
        result = result + '<p>%s</p>\n' % doc

        contents = []
        method_items = sorted(methods.items())
        for key, value in method_items:
            contents.append(self.docroutine(value, key, funcs=fdict))
        result = result + self.bigsection(
            'Methods', '#ffffff', '#eeaa77', ''.join(contents))

        return result
+
class XMLRPCDocGenerator(object):
    """Generates documentation for an XML-RPC server.

    This class is designed as mix-in and should not
    be constructed directly.
    """

    def __init__(self):
        # setup variables used for HTML documentation
        self.server_name = 'XML-RPC Server Documentation'
        self.server_documentation = \
            "This server exports the following methods through the XML-RPC "\
            "protocol."
        self.server_title = 'XML-RPC Server Documentation'

    def set_server_title(self, server_title):
        """Set the HTML title of the generated server documentation"""

        self.server_title = server_title

    def set_server_name(self, server_name):
        """Set the name of the generated HTML server documentation"""

        self.server_name = server_name

    def set_server_documentation(self, server_documentation):
        """Set the documentation string for the entire server."""

        self.server_documentation = server_documentation

    def generate_html_documentation(self):
        """generate_html_documentation() => html documentation for the server

        Generates HTML documentation for the server using introspection for
        installed functions and instances that do not implement the
        _dispatch method. Alternatively, instances can choose to implement
        the _get_method_argstring(method_name) method to provide the
        argument string used in the documentation and the
        _methodHelp(method_name) method to provide the help text used
        in the documentation."""

        methods = {}

        for method_name in self.system_listMethods():
            if method_name in self.funcs:
                # Registered function: document the callable directly.
                method = self.funcs[method_name]
            elif self.instance is not None:
                method_info = [None, None] # argspec, documentation
                if hasattr(self.instance, '_get_method_argstring'):
                    method_info[0] = self.instance._get_method_argstring(method_name)
                if hasattr(self.instance, '_methodHelp'):
                    method_info[1] = self.instance._methodHelp(method_name)

                method_info = tuple(method_info)
                if method_info != (None, None):
                    # instance supplied its own argspec and/or help text
                    method = method_info
                elif not hasattr(self.instance, '_dispatch'):
                    # fall back to introspecting the instance attribute;
                    # on failure the (None, None) tuple is used.
                    try:
                        method = resolve_dotted_attribute(
                                    self.instance,
                                    method_name
                                    )
                    except AttributeError:
                        method = method_info
                else:
                    method = method_info
            else:
                assert 0, "Could not find method in self.functions and no "\
                          "instance installed"

            methods[method_name] = method

        documenter = ServerHTMLDoc()
        documentation = documenter.docserver(
                            self.server_name,
                            self.server_documentation,
                            methods
                        )

        return documenter.page(self.server_title, documentation)
+
class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
    """XML-RPC and documentation request handler class.

    Handles all HTTP POST requests and attempts to decode them as
    XML-RPC requests.

    Handles all HTTP GET requests and interprets them as requests
    for documentation.
    """

    def do_GET(self):
        """Handles the HTTP GET request.

        Interpret all HTTP GET requests as requests for server
        documentation.
        """
        # Check that the path is legal
        if not self.is_rpc_path_valid():
            self.report_404()
            return

        # The owning server mixes in XMLRPCDocGenerator, which provides
        # generate_html_documentation().
        response = self.server.generate_html_documentation().encode('utf-8')
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.send_header("Content-length", str(len(response)))
        self.end_headers()
        self.wfile.write(response)
+
class DocXMLRPCServer(  SimpleXMLRPCServer,
                        XMLRPCDocGenerator):
    """XML-RPC and HTML documentation server.

    Adds the ability to serve server documentation to the capabilities
    of SimpleXMLRPCServer.
    """

    def __init__(self, addr, requestHandler=DocXMLRPCRequestHandler,
                 logRequests=True, allow_none=False, encoding=None,
                 bind_and_activate=True, use_builtin_types=False):
        # Initialize the XML-RPC server first, then the documentation
        # generator's default name/title/description state.
        SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests,
                                    allow_none, encoding, bind_and_activate,
                                    use_builtin_types)
        XMLRPCDocGenerator.__init__(self)
+
class DocCGIXMLRPCRequestHandler(   CGIXMLRPCRequestHandler,
                                    XMLRPCDocGenerator):
    """Handler for XML-RPC data and documentation requests passed through
    CGI"""

    def handle_get(self):
        """Handles the HTTP GET request.

        Interpret all HTTP GET requests as requests for server
        documentation.
        """

        response = self.generate_html_documentation().encode('utf-8')

        # CGI convention: text headers to stdout, bytes payload through
        # the underlying binary buffer.
        print('Content-Type: text/html')
        print('Content-Length: %d' % len(response))
        print()
        sys.stdout.flush()
        sys.stdout.buffer.write(response)
        sys.stdout.buffer.flush()

    def __init__(self):
        CGIXMLRPCRequestHandler.__init__(self)
        XMLRPCDocGenerator.__init__(self)
+
+
if __name__ == '__main__':
    import datetime

    # Demo service: exposes getData() plus a nested currentTime class
    # reachable through dotted names (currentTime.getCurrentTime).
    class ExampleService:
        def getData(self):
            return '42'

        class currentTime:
            @staticmethod
            def getCurrentTime():
                return datetime.datetime.now()

    server = SimpleXMLRPCServer(("localhost", 8000))
    server.register_function(pow)
    server.register_function(lambda x,y: x+y, 'add')
    server.register_instance(ExampleService(), allow_dotted_names=True)
    server.register_multicall_functions()
    print('Serving XML-RPC on localhost port 8000')
    print('It is advisable to run this example server within a secure, closed network.')
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print("\nKeyboard interrupt received, exiting.")
        server.server_close()
        sys.exit(0)
diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/__init__.py
new file mode 100644
index 000000000..216465a15
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/__init__.py
@@ -0,0 +1,51 @@
+"""
+A module that brings in equivalents of the new and modified Python 3
+builtins into Py2. Has no effect on Py3.
+
See the docs `here <http://python-future.org/what-else.html>`_
(``docs/what-else.rst``) for more information.
+
+"""
+
+from future.builtins.iterators import (filter, map, zip)
+# The isinstance import is no longer needed. We provide it only for
+# backward-compatibility with future v0.8.2. It will be removed in future v1.0.
+from future.builtins.misc import (ascii, chr, hex, input, isinstance, next,
+ oct, open, pow, round, super)
+from future.utils import PY3
+
if PY3:
    # On Python 3 the backported names are simply the stock builtins, so
    # importing from future.builtins is a no-op there.
    import builtins
    bytes = builtins.bytes
    dict = builtins.dict
    int = builtins.int
    list = builtins.list
    object = builtins.object
    range = builtins.range
    str = builtins.str
    __all__ = []
else:
    # On Python 2, substitute future's backported types for the builtins.
    from future.types import (newbytes as bytes,
                              newdict as dict,
                              newint as int,
                              newlist as list,
                              newobject as object,
                              newrange as range,
                              newstr as str)
from future import utils


if not utils.PY3:
    # We only import names that shadow the builtins on Py2. No other namespace
    # pollution on Py2.

    # Only shadow builtins on Py2; no new names
    __all__ = ['filter', 'map', 'zip',
               'ascii', 'chr', 'hex', 'input', 'next', 'oct', 'open', 'pow',
               'round', 'super',
               'bytes', 'dict', 'int', 'list', 'object', 'range', 'str',
               ]

else:
    # No namespace pollution on Py3
    __all__ = []
diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/disabled.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/disabled.py
new file mode 100644
index 000000000..f6d6ea9b8
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/disabled.py
@@ -0,0 +1,66 @@
+"""
+This disables builtin functions (and one exception class) which are
+removed from Python 3.3.
+
+This module is designed to be used like this::
+
+ from future.builtins.disabled import *
+
+This disables the following obsolete Py2 builtin functions::
+
+ apply, cmp, coerce, execfile, file, input, long,
+ raw_input, reduce, reload, unicode, xrange
+
+We don't hack __builtin__, which is very fragile because it contaminates
+imported modules too. Instead, we just create new functions with
+the same names as the obsolete builtins from Python 2 which raise
+NameError exceptions when called.
+
+Note that both ``input()`` and ``raw_input()`` are among the disabled
+functions (in this module). Although ``input()`` exists as a builtin in
+Python 3, the Python 2 ``input()`` builtin is unsafe to use because it
+can lead to shell injection. Therefore we shadow it by default upon ``from
+future.builtins.disabled import *``, in case someone forgets to import our
+replacement ``input()`` somehow and expects Python 3 semantics.
+
+See the ``future.builtins.misc`` module for a working version of
+``input`` with Python 3 semantics.
+
+(Note that callable() is not among the functions disabled; this was
+reintroduced into Python 3.2.)
+
+This exception class is also disabled:
+
+ StandardError
+
+"""
+
+from __future__ import division, absolute_import, print_function
+
+from future import utils
+
+
+OBSOLETE_BUILTINS = ['apply', 'chr', 'cmp', 'coerce', 'execfile', 'file',
+ 'input', 'long', 'raw_input', 'reduce', 'reload',
+ 'unicode', 'xrange', 'StandardError']
+
+
+def disabled_function(name):
+ '''
+ Returns a function that cannot be called
+ '''
+ def disabled(*args, **kwargs):
+ '''
+ A function disabled by the ``future`` module. This function is
+ no longer a builtin in Python 3.
+ '''
+ raise NameError('obsolete Python 2 builtin {0} is disabled'.format(name))
+ return disabled
+
+
+if not utils.PY3:
+ for fname in OBSOLETE_BUILTINS:
+ locals()[fname] = disabled_function(fname)
+ __all__ = OBSOLETE_BUILTINS
+else:
+ __all__ = []
diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/iterators.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/iterators.py
new file mode 100644
index 000000000..dff651e0f
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/iterators.py
@@ -0,0 +1,52 @@
+"""
+This module is designed to be used as follows::
+
+ from future.builtins.iterators import *
+
+And then, for example::
+
+ for i in range(10**15):
+ pass
+
+ for (a, b) in zip(range(10**15), range(-10**15, 0)):
+ pass
+
+Note that this is standard Python 3 code, plus some imports that do
+nothing on Python 3.
+
+The iterators this brings in are::
+
+- ``range``
+- ``filter``
+- ``map``
+- ``zip``
+
+On Python 2, ``range`` is a pure-Python backport of Python 3's ``range``
+iterator with slicing support. The other iterators (``filter``, ``map``,
+``zip``) are from the ``itertools`` module on Python 2. On Python 3 these
+are available in the module namespace but not exported for * imports via
+__all__ (i.e. zero namespace pollution from ``*`` imports).
+
+Note that these are also available in the standard library
+``future_builtins`` module on Python 2 -- but not Python 3, so using
+the standard library version is not portable, nor anywhere near complete.
+"""
+
+from __future__ import division, absolute_import, print_function
+
+import itertools
+from future import utils
+
+if not utils.PY3:
+ filter = itertools.ifilter
+ map = itertools.imap
+ from future.types import newrange as range
+ zip = itertools.izip
+ __all__ = ['filter', 'map', 'range', 'zip']
+else:
+ import builtins
+ filter = builtins.filter
+ map = builtins.map
+ range = builtins.range
+ zip = builtins.zip
+ __all__ = []
diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/misc.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/misc.py
new file mode 100644
index 000000000..90dc384ad
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/misc.py
@@ -0,0 +1,124 @@
+"""
+A module that brings in equivalents of various modified Python 3 builtins
+into Py2. Has no effect on Py3.
+
+The builtin functions are:
+
+- ``ascii`` (from Py2's future_builtins module)
+- ``hex`` (from Py2's future_builtins module)
+- ``oct`` (from Py2's future_builtins module)
+- ``chr`` (equivalent to ``unichr`` on Py2)
+- ``input`` (equivalent to ``raw_input`` on Py2)
+- ``next`` (calls ``__next__`` if it exists, else ``next`` method)
+- ``open`` (equivalent to io.open on Py2)
+- ``super`` (backport of Py3's magic zero-argument super() function
+- ``round`` (new "Banker's Rounding" behaviour from Py3)
+
+``isinstance`` is also currently exported for backwards compatibility
+with v0.8.2, although this has been deprecated since v0.9.
+
+
+input()
+-------
+Like the new ``input()`` function from Python 3 (without eval()), except
+that it returns bytes. Equivalent to Python 2's ``raw_input()``.
+
+Warning: By default, importing this module *removes* the old Python 2
+input() function entirely from ``__builtin__`` for safety. This is
+because forgetting to import the new ``input`` from ``future`` might
+otherwise lead to a security vulnerability (shell injection) on Python 2.
+
+To restore it, you can retrieve it yourself from
+``__builtin__._old_input``.
+
+Fortunately, ``input()`` seems to be seldom used in the wild in Python
+2...
+
+"""
+
+from future import utils
+
+
+if utils.PY2:
+ from io import open
+ from future_builtins import ascii, oct, hex
+ from __builtin__ import unichr as chr, pow as _builtin_pow
+ import __builtin__
+
+ # Only for backward compatibility with future v0.8.2:
+ isinstance = __builtin__.isinstance
+
+ # Warning: Python 2's input() is unsafe and MUST not be able to be used
+ # accidentally by someone who expects Python 3 semantics but forgets
+ # to import it on Python 2. Versions of ``future`` prior to 0.11
+ # deleted it from __builtin__. Now we keep in __builtin__ but shadow
+ # the name like all others. Just be sure to import ``input``.
+
+ input = raw_input
+
+ from future.builtins.newnext import newnext as next
+ from future.builtins.newround import newround as round
+ from future.builtins.newsuper import newsuper as super
+ from future.types.newint import newint
+
+ _SENTINEL = object()
+
+ def pow(x, y, z=_SENTINEL):
+ """
+ pow(x, y[, z]) -> number
+
+ With two arguments, equivalent to x**y. With three arguments,
+ equivalent to (x**y) % z, but may be more efficient (e.g. for ints).
+ """
+ # Handle newints
+ if isinstance(x, newint):
+ x = long(x)
+ if isinstance(y, newint):
+ y = long(y)
+ if isinstance(z, newint):
+ z = long(z)
+
+ try:
+ if z == _SENTINEL:
+ return _builtin_pow(x, y)
+ else:
+ return _builtin_pow(x, y, z)
+ except ValueError:
+ if z == _SENTINEL:
+ return _builtin_pow(x+0j, y)
+ else:
+ return _builtin_pow(x+0j, y, z)
+
+ # ``future`` doesn't support Py3.0/3.1. If we ever did, we'd add this:
+ # callable = __builtin__.callable
+
+ __all__ = ['ascii', 'chr', 'hex', 'input', 'isinstance', 'next', 'oct',
+ 'open', 'pow', 'round', 'super']
+
+else:
+ import builtins
+ ascii = builtins.ascii
+ chr = builtins.chr
+ hex = builtins.hex
+ input = builtins.input
+ next = builtins.next
+ # Only for backward compatibility with future v0.8.2:
+ isinstance = builtins.isinstance
+ oct = builtins.oct
+ open = builtins.open
+ pow = builtins.pow
+ round = builtins.round
+ super = builtins.super
+
+ __all__ = []
+
+ # The callable() function was removed from Py3.0 and 3.1 and
+ # reintroduced into Py3.2+. ``future`` doesn't support Py3.0/3.1. If we ever
+ # did, we'd add this:
+ # try:
+ # callable = builtins.callable
+ # except AttributeError:
+ # # Definition from Pandas
+ # def callable(obj):
+ # return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+ # __all__.append('callable')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/newnext.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/newnext.py
new file mode 100644
index 000000000..097638ac1
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/newnext.py
@@ -0,0 +1,70 @@
+'''
+This module provides a newnext() function in Python 2 that mimics the
+behaviour of ``next()`` in Python 3, falling back to Python 2's behaviour for
+compatibility if this fails.
+
+``newnext(iterator)`` calls the iterator's ``__next__()`` method if it exists. If this
+doesn't exist, it falls back to calling a ``next()`` method.
+
+For example:
+
+ >>> class Odds(object):
+ ... def __init__(self, start=1):
+ ... self.value = start - 2
+ ... def __next__(self): # note the Py3 interface
+ ... self.value += 2
+ ... return self.value
+ ... def __iter__(self):
+ ... return self
+ ...
+ >>> iterator = Odds()
+ >>> next(iterator)
+ 1
+ >>> next(iterator)
+ 3
+
+If you are defining your own custom iterator class as above, it is preferable
+to explicitly decorate the class with the @implements_iterator decorator from
+``future.utils`` as follows:
+
+ >>> @implements_iterator
+ ... class Odds(object):
+ ... # etc
+ ... pass
+
+This next() function is primarily for consuming iterators defined in Python 3
+code elsewhere that we would like to run on Python 2 or 3.
+'''
+
+_builtin_next = next
+
+_SENTINEL = object()
+
+def newnext(iterator, default=_SENTINEL):
+ """
+ next(iterator[, default])
+
+ Return the next item from the iterator. If default is given and the iterator
+ is exhausted, it is returned instead of raising StopIteration.
+ """
+
+ # args = []
+ # if default is not _SENTINEL:
+ # args.append(default)
+ try:
+ try:
+ return iterator.__next__()
+ except AttributeError:
+ try:
+ return iterator.next()
+ except AttributeError:
+ raise TypeError("'{0}' object is not an iterator".format(
+ iterator.__class__.__name__))
+ except StopIteration as e:
+ if default is _SENTINEL:
+ raise e
+ else:
+ return default
+
+
+__all__ = ['newnext']
diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/newround.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/newround.py
new file mode 100644
index 000000000..3943ebb6e
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/newround.py
@@ -0,0 +1,99 @@
+"""
+``python-future``: pure Python implementation of Python 3 round().
+"""
+
+from future.utils import PYPY, PY26, bind_method
+
+# Use the decimal module for simplicity of implementation (and
+# hopefully correctness).
+from decimal import Decimal, ROUND_HALF_EVEN
+
+
+def newround(number, ndigits=None):
+ """
+ See Python 3 documentation: uses Banker's Rounding.
+
+ Delegates to the __round__ method if for some reason this exists.
+
+ If not, rounds a number to a given precision in decimal digits (default
+ 0 digits). This returns an int when called with one argument,
+ otherwise the same type as the number. ndigits may be negative.
+
+ See the test_round method in future/tests/test_builtins.py for
+ examples.
+ """
+ return_int = False
+ if ndigits is None:
+ return_int = True
+ ndigits = 0
+ if hasattr(number, '__round__'):
+ return number.__round__(ndigits)
+
+ if ndigits < 0:
+ raise NotImplementedError('negative ndigits not supported yet')
+ exponent = Decimal('10') ** (-ndigits)
+
+ if PYPY:
+ # Work around issue #24: round() breaks on PyPy with NumPy's types
+ if 'numpy' in repr(type(number)):
+ number = float(number)
+
+ if not PY26:
+ d = Decimal.from_float(number).quantize(exponent,
+ rounding=ROUND_HALF_EVEN)
+ else:
+ d = from_float_26(number).quantize(exponent, rounding=ROUND_HALF_EVEN)
+
+ if return_int:
+ return int(d)
+ else:
+ return float(d)
+
+
+### From Python 2.7's decimal.py. Only needed to support Py2.6:
+
+def from_float_26(f):
+ """Converts a float to a decimal number, exactly.
+
+ Note that Decimal.from_float(0.1) is not the same as Decimal('0.1').
+ Since 0.1 is not exactly representable in binary floating point, the
+ value is stored as the nearest representable value which is
+ 0x1.999999999999ap-4. The exact equivalent of the value in decimal
+ is 0.1000000000000000055511151231257827021181583404541015625.
+
+ >>> Decimal.from_float(0.1)
+ Decimal('0.1000000000000000055511151231257827021181583404541015625')
+ >>> Decimal.from_float(float('nan'))
+ Decimal('NaN')
+ >>> Decimal.from_float(float('inf'))
+ Decimal('Infinity')
+ >>> Decimal.from_float(-float('inf'))
+ Decimal('-Infinity')
+ >>> Decimal.from_float(-0.0)
+ Decimal('-0')
+
+ """
+ import math as _math
+ from decimal import _dec_from_triple # only available on Py2.6 and Py2.7 (not 3.3)
+
+ if isinstance(f, (int, long)): # handle integer inputs
+ return Decimal(f)
+ if _math.isinf(f) or _math.isnan(f): # raises TypeError if not a float
+ return Decimal(repr(f))
+ if _math.copysign(1.0, f) == 1.0:
+ sign = 0
+ else:
+ sign = 1
+ n, d = abs(f).as_integer_ratio()
+ # int.bit_length() method doesn't exist on Py2.6:
+ def bit_length(d):
+ if d != 0:
+ return len(bin(abs(d))) - 2
+ else:
+ return 0
+ k = bit_length(d) - 1
+ result = _dec_from_triple(sign, str(n*5**k), -k)
+ return result
+
+
+__all__ = ['newround']
diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/newsuper.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/newsuper.py
new file mode 100644
index 000000000..5d3402bd2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/newsuper.py
@@ -0,0 +1,114 @@
+'''
+This module provides a newsuper() function in Python 2 that mimics the
+behaviour of super() in Python 3. It is designed to be used as follows:
+
+ from __future__ import division, absolute_import, print_function
+ from future.builtins import super
+
+And then, for example:
+
+ class VerboseList(list):
+ def append(self, item):
+ print('Adding an item')
+ super().append(item) # new simpler super() function
+
+Importing this module on Python 3 has no effect.
+
+This is based on (i.e. almost identical to) Ryan Kelly's magicsuper
+module here:
+
+ https://github.com/rfk/magicsuper.git
+
+Excerpts from Ryan's docstring:
+
+ "Of course, you can still explicitly pass in the arguments if you want
+ to do something strange. Sometimes you really do want that, e.g. to
+ skip over some classes in the method resolution order.
+
+ "How does it work? By inspecting the calling frame to determine the
+ function object being executed and the object on which it's being
+ called, and then walking the object's __mro__ chain to find out where
+ that function was defined. Yuck, but it seems to work..."
+'''
+
+from __future__ import absolute_import
+import sys
+from types import FunctionType
+
+from future.utils import PY3, PY26
+
+
+_builtin_super = super
+
+_SENTINEL = object()
+
+def newsuper(typ=_SENTINEL, type_or_obj=_SENTINEL, framedepth=1):
+ '''Like builtin super(), but capable of magic.
+
+ This acts just like the builtin super() function, but if called
+ without any arguments it attempts to infer them at runtime.
+ '''
+ # Infer the correct call if used without arguments.
+ if typ is _SENTINEL:
+ # We'll need to do some frame hacking.
+ f = sys._getframe(framedepth)
+
+ try:
+ # Get the function's first positional argument.
+ type_or_obj = f.f_locals[f.f_code.co_varnames[0]]
+ except (IndexError, KeyError,):
+ raise RuntimeError('super() used in a function with no args')
+
+ try:
+ # Get the MRO so we can crawl it.
+ mro = type_or_obj.__mro__
+ except (AttributeError, RuntimeError): # see issue #160
+ try:
+ mro = type_or_obj.__class__.__mro__
+ except AttributeError:
+ raise RuntimeError('super() used with a non-newstyle class')
+
+ # A ``for...else`` block? Yes! It's odd, but useful.
+ # If unfamiliar with for...else, see:
+ #
+ # http://psung.blogspot.com/2007/12/for-else-in-python.html
+ for typ in mro:
+ # Find the class that owns the currently-executing method.
+ for meth in typ.__dict__.values():
+ # Drill down through any wrappers to the underlying func.
+ # This handles e.g. classmethod() and staticmethod().
+ try:
+ while not isinstance(meth,FunctionType):
+ if isinstance(meth, property):
+ # Calling __get__ on the property will invoke
+ # user code which might throw exceptions or have
+ # side effects
+ meth = meth.fget
+ else:
+ try:
+ meth = meth.__func__
+ except AttributeError:
+ meth = meth.__get__(type_or_obj, typ)
+ except (AttributeError, TypeError):
+ continue
+ if meth.func_code is f.f_code:
+ break # Aha! Found you.
+ else:
+ continue # Not found! Move onto the next class in MRO.
+ break # Found! Break out of the search loop.
+ else:
+ raise RuntimeError('super() called outside a method')
+
+ # Dispatch to builtin super().
+ if type_or_obj is not _SENTINEL:
+ return _builtin_super(typ, type_or_obj)
+ return _builtin_super(typ)
+
+
+def superm(*args, **kwds):
+ f = sys._getframe(1)
+ nm = f.f_code.co_name
+ return getattr(newsuper(framedepth=2),nm)(*args, **kwds)
+
+
+__all__ = ['newsuper']
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/__init__.py
new file mode 100644
index 000000000..040fdcf01
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/__init__.py
@@ -0,0 +1,8 @@
+# future.moves package
+from __future__ import absolute_import
+import sys
+__future_module__ = True
+from future.standard_library import import_top_level_modules
+
+if sys.version_info[0] == 3:
+ import_top_level_modules()
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/_dummy_thread.py b/.install/.kodi/addons/script.module.future/libs/future/moves/_dummy_thread.py
new file mode 100644
index 000000000..688d249bb
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/_dummy_thread.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from _dummy_thread import *
+else:
+ __future_module__ = True
+ from dummy_thread import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/_markupbase.py b/.install/.kodi/addons/script.module.future/libs/future/moves/_markupbase.py
new file mode 100644
index 000000000..f9fb4bbf2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/_markupbase.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from _markupbase import *
+else:
+ __future_module__ = True
+ from markupbase import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/_thread.py b/.install/.kodi/addons/script.module.future/libs/future/moves/_thread.py
new file mode 100644
index 000000000..c68018bb1
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/_thread.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from _thread import *
+else:
+ __future_module__ = True
+ from thread import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/builtins.py b/.install/.kodi/addons/script.module.future/libs/future/moves/builtins.py
new file mode 100644
index 000000000..e4b6221d5
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/builtins.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from builtins import *
+else:
+ __future_module__ = True
+ from __builtin__ import *
+ # Overwrite any old definitions with the equivalent future.builtins ones:
+ from future.builtins import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/collections.py b/.install/.kodi/addons/script.module.future/libs/future/moves/collections.py
new file mode 100644
index 000000000..664ee6a3d
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/collections.py
@@ -0,0 +1,18 @@
+from __future__ import absolute_import
+import sys
+
+from future.utils import PY2, PY26
+__future_module__ = True
+
+from collections import *
+
+if PY2:
+ from UserDict import UserDict
+ from UserList import UserList
+ from UserString import UserString
+
+if PY26:
+ from future.backports.misc import OrderedDict, Counter
+
+if sys.version_info < (3, 3):
+ from future.backports.misc import ChainMap, _count_elements
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/configparser.py b/.install/.kodi/addons/script.module.future/libs/future/moves/configparser.py
new file mode 100644
index 000000000..33d9cf953
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/configparser.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+
+from future.utils import PY2
+
+if PY2:
+ from ConfigParser import *
+else:
+ from configparser import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/copyreg.py b/.install/.kodi/addons/script.module.future/libs/future/moves/copyreg.py
new file mode 100644
index 000000000..21c7a42f2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/copyreg.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from copyreg import *
+else:
+ __future_module__ = True
+ from copy_reg import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/__init__.py
new file mode 100644
index 000000000..626b406f7
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/__init__.py
@@ -0,0 +1,20 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from dbm import *
+else:
+ __future_module__ = True
+ from whichdb import *
+ from anydbm import *
+
+# Py3.3's dbm/__init__.py imports ndbm but doesn't expose it via __all__.
+# In case some (badly written) code depends on dbm.ndbm after import dbm,
+# we simulate this:
+if PY3:
+ from dbm import ndbm
+else:
+ try:
+ from future.moves.dbm import ndbm
+ except ImportError:
+ ndbm = None
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/dumb.py b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/dumb.py
new file mode 100644
index 000000000..528383f6d
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/dumb.py
@@ -0,0 +1,9 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from dbm.dumb import *
+else:
+ __future_module__ = True
+ from dumbdbm import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/gnu.py b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/gnu.py
new file mode 100644
index 000000000..68ccf67b9
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/gnu.py
@@ -0,0 +1,9 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from dbm.gnu import *
+else:
+ __future_module__ = True
+ from gdbm import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/ndbm.py b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/ndbm.py
new file mode 100644
index 000000000..8c6fff8ab
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/ndbm.py
@@ -0,0 +1,9 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from dbm.ndbm import *
+else:
+ __future_module__ = True
+ from dbm import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/html/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/html/__init__.py
new file mode 100644
index 000000000..22ed6e7d2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/html/__init__.py
@@ -0,0 +1,31 @@
+from __future__ import absolute_import
+from future.utils import PY3
+__future_module__ = True
+
+if PY3:
+ from html import *
+else:
+ # cgi.escape isn't good enough for the single Py3.3 html test to pass.
+ # Define it inline here instead. From the Py3.4 stdlib. Note that the
+ # html.escape() function from the Py3.3 stdlib is not suitable for use on
+ # Py2.x.
+ """
+ General functions for HTML manipulation.
+ """
+
+ def escape(s, quote=True):
+ """
+ Replace special characters "&", "<" and ">" to HTML-safe sequences.
+ If the optional flag quote is true (the default), the quotation mark
+ characters, both double quote (") and single quote (') characters are also
+ translated.
+ """
+    s = s.replace("&", "&amp;") # Must be done first!
+    s = s.replace("<", "&lt;")
+    s = s.replace(">", "&gt;")
+    if quote:
+        s = s.replace('"', "&quot;")
+        s = s.replace('\'', "&#x27;")
+    return s
+
+ __all__ = ['escape']
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/html/entities.py b/.install/.kodi/addons/script.module.future/libs/future/moves/html/entities.py
new file mode 100644
index 000000000..56a886091
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/html/entities.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from html.entities import *
+else:
+ __future_module__ = True
+ from htmlentitydefs import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/html/parser.py b/.install/.kodi/addons/script.module.future/libs/future/moves/html/parser.py
new file mode 100644
index 000000000..a6115b59f
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/html/parser.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+from future.utils import PY3
+__future_module__ = True
+
+if PY3:
+ from html.parser import *
+else:
+ from HTMLParser import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/http/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/http/__init__.py
new file mode 100644
index 000000000..917b3d71a
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/http/__init__.py
@@ -0,0 +1,4 @@
+from future.utils import PY3
+
+if not PY3:
+ __future_module__ = True
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/http/client.py b/.install/.kodi/addons/script.module.future/libs/future/moves/http/client.py
new file mode 100644
index 000000000..55f9c9c1a
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/http/client.py
@@ -0,0 +1,8 @@
+from future.utils import PY3
+
+if PY3:
+ from http.client import *
+else:
+ from httplib import *
+ from httplib import HTTPMessage
+ __future_module__ = True
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/http/cookiejar.py b/.install/.kodi/addons/script.module.future/libs/future/moves/http/cookiejar.py
new file mode 100644
index 000000000..ea00df772
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/http/cookiejar.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from http.cookiejar import *
+else:
+ __future_module__ = True
+ from cookielib import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/http/cookies.py b/.install/.kodi/addons/script.module.future/libs/future/moves/http/cookies.py
new file mode 100644
index 000000000..1b74fe2dd
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/http/cookies.py
@@ -0,0 +1,9 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from http.cookies import *
+else:
+ __future_module__ = True
+ from Cookie import *
+ from Cookie import Morsel # left out of __all__ on Py2.7!
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/http/server.py b/.install/.kodi/addons/script.module.future/libs/future/moves/http/server.py
new file mode 100644
index 000000000..4e75cc1de
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/http/server.py
@@ -0,0 +1,20 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from http.server import *
+else:
+ __future_module__ = True
+ from BaseHTTPServer import *
+ from CGIHTTPServer import *
+ from SimpleHTTPServer import *
+ try:
+ from CGIHTTPServer import _url_collapse_path # needed for a test
+ except ImportError:
+ try:
+ # Python 2.7.0 to 2.7.3
+ from CGIHTTPServer import (
+ _url_collapse_path_split as _url_collapse_path)
+ except ImportError:
+ # Doesn't exist on Python 2.6.x. Ignore it.
+ pass
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/itertools.py b/.install/.kodi/addons/script.module.future/libs/future/moves/itertools.py
new file mode 100644
index 000000000..e5eb20d5d
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/itertools.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+
+from itertools import *
+try:
+ zip_longest = izip_longest
+ filterfalse = ifilterfalse
+except NameError:
+ pass
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/pickle.py b/.install/.kodi/addons/script.module.future/libs/future/moves/pickle.py
new file mode 100644
index 000000000..c53d69392
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/pickle.py
@@ -0,0 +1,11 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from pickle import *
+else:
+ __future_module__ = True
+ try:
+ from cPickle import *
+ except ImportError:
+ from pickle import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/queue.py b/.install/.kodi/addons/script.module.future/libs/future/moves/queue.py
new file mode 100644
index 000000000..1cb1437d7
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/queue.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from queue import *
+else:
+ __future_module__ = True
+ from Queue import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/reprlib.py b/.install/.kodi/addons/script.module.future/libs/future/moves/reprlib.py
new file mode 100644
index 000000000..a313a13a4
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/reprlib.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from reprlib import *
+else:
+ __future_module__ = True
+ from repr import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/socketserver.py b/.install/.kodi/addons/script.module.future/libs/future/moves/socketserver.py
new file mode 100644
index 000000000..062e0848d
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/socketserver.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from socketserver import *
+else:
+ __future_module__ = True
+ from SocketServer import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/subprocess.py b/.install/.kodi/addons/script.module.future/libs/future/moves/subprocess.py
new file mode 100644
index 000000000..43ffd2ac2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/subprocess.py
@@ -0,0 +1,11 @@
+from __future__ import absolute_import
+from future.utils import PY2, PY26
+
+from subprocess import *
+
+if PY2:
+ __future_module__ = True
+ from commands import getoutput, getstatusoutput
+
+if PY26:
+ from future.backports.misc import check_output
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/sys.py b/.install/.kodi/addons/script.module.future/libs/future/moves/sys.py
new file mode 100644
index 000000000..1293bcb07
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/sys.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+
+from future.utils import PY2
+
+from sys import *
+
+if PY2:
+ from __builtin__ import intern
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/test/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/test/__init__.py
new file mode 100644
index 000000000..5cf428b6e
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/test/__init__.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if not PY3:
+ __future_module__ = True
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/test/support.py b/.install/.kodi/addons/script.module.future/libs/future/moves/test/support.py
new file mode 100644
index 000000000..e9aa0f48f
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/test/support.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import
+from future.standard_library import suspend_hooks
+from future.utils import PY3
+
+if PY3:
+ from test.support import *
+else:
+ __future_module__ = True
+ with suspend_hooks():
+ from test.test_support import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/__init__.py
new file mode 100644
index 000000000..e40829663
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/__init__.py
@@ -0,0 +1,27 @@
+from __future__ import absolute_import
+from future.utils import PY3
+__future_module__ = True
+
+if not PY3:
+ from Tkinter import *
+ from Tkinter import (_cnfmerge, _default_root, _flatten,
+ _support_default_root, _test,
+ _tkinter, _setit)
+
+ try: # >= 2.7.4
+ from Tkinter import (_join)
+ except ImportError:
+ pass
+
+ try: # >= 2.7.4
+ from Tkinter import (_stringify)
+ except ImportError:
+ pass
+
+ try: # >= 2.7.9
+ from Tkinter import (_splitdict)
+ except ImportError:
+ pass
+
+else:
+ from tkinter import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/colorchooser.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/colorchooser.py
new file mode 100644
index 000000000..6dde6e8d3
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/colorchooser.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from tkinter.colorchooser import *
+else:
+ try:
+ from tkColorChooser import *
+ except ImportError:
+ raise ImportError('The tkColorChooser module is missing. Does your Py2 '
+ 'installation include tkinter?')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/commondialog.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/commondialog.py
new file mode 100644
index 000000000..eb7ae8d60
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/commondialog.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from tkinter.commondialog import *
+else:
+ try:
+ from tkCommonDialog import *
+ except ImportError:
+ raise ImportError('The tkCommonDialog module is missing. Does your Py2 '
+ 'installation include tkinter?')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/constants.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/constants.py
new file mode 100644
index 000000000..ffe098152
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/constants.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from tkinter.constants import *
+else:
+ try:
+ from Tkconstants import *
+ except ImportError:
+ raise ImportError('The Tkconstants module is missing. Does your Py2 '
+ 'installation include tkinter?')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/dialog.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/dialog.py
new file mode 100644
index 000000000..113370ca2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/dialog.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from tkinter.dialog import *
+else:
+ try:
+ from Dialog import *
+ except ImportError:
+ raise ImportError('The Dialog module is missing. Does your Py2 '
+ 'installation include tkinter?')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/dnd.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/dnd.py
new file mode 100644
index 000000000..1ab437917
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/dnd.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from tkinter.dnd import *
+else:
+ try:
+ from Tkdnd import *
+ except ImportError:
+ raise ImportError('The Tkdnd module is missing. Does your Py2 '
+ 'installation include tkinter?')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/filedialog.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/filedialog.py
new file mode 100644
index 000000000..973923e2c
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/filedialog.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from tkinter.filedialog import *
+else:
+ try:
+ from FileDialog import *
+ except ImportError:
+ raise ImportError('The FileDialog module is missing. Does your Py2 '
+ 'installation include tkinter?')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/font.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/font.py
new file mode 100644
index 000000000..628f399a3
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/font.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from tkinter.font import *
+else:
+ try:
+ from tkFont import *
+ except ImportError:
+ raise ImportError('The tkFont module is missing. Does your Py2 '
+ 'installation include tkinter?')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/messagebox.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/messagebox.py
new file mode 100644
index 000000000..b43d8702f
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/messagebox.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from tkinter.messagebox import *
+else:
+ try:
+ from tkMessageBox import *
+ except ImportError:
+ raise ImportError('The tkMessageBox module is missing. Does your Py2 '
+ 'installation include tkinter?')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/scrolledtext.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/scrolledtext.py
new file mode 100644
index 000000000..1c69db606
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/scrolledtext.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from tkinter.scrolledtext import *
+else:
+ try:
+ from ScrolledText import *
+ except ImportError:
+ raise ImportError('The ScrolledText module is missing. Does your Py2 '
+ 'installation include tkinter?')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/simpledialog.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/simpledialog.py
new file mode 100644
index 000000000..dba93fbf2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/simpledialog.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from tkinter.simpledialog import *
+else:
+ try:
+ from SimpleDialog import *
+ except ImportError:
+ raise ImportError('The SimpleDialog module is missing. Does your Py2 '
+ 'installation include tkinter?')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/tix.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/tix.py
new file mode 100644
index 000000000..8d1718ad0
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/tix.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from tkinter.tix import *
+else:
+ try:
+ from Tix import *
+ except ImportError:
+ raise ImportError('The Tix module is missing. Does your Py2 '
+ 'installation include tkinter?')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/ttk.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/ttk.py
new file mode 100644
index 000000000..081c1b495
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/ttk.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+from future.utils import PY3
+
+if PY3:
+ from tkinter.ttk import *
+else:
+ try:
+ from ttk import *
+ except ImportError:
+ raise ImportError('The ttk module is missing. Does your Py2 '
+ 'installation include tkinter?')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/__init__.py
new file mode 100644
index 000000000..5cf428b6e
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/__init__.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if not PY3:
+ __future_module__ = True
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/error.py b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/error.py
new file mode 100644
index 000000000..7d8ada73f
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/error.py
@@ -0,0 +1,16 @@
+from __future__ import absolute_import
+from future.standard_library import suspend_hooks
+
+from future.utils import PY3
+
+if PY3:
+ from urllib.error import *
+else:
+ __future_module__ = True
+
+ # We use this method to get at the original Py2 urllib before any renaming magic
+ # ContentTooShortError = sys.py2_modules['urllib'].ContentTooShortError
+
+ with suspend_hooks():
+ from urllib import ContentTooShortError
+ from urllib2 import URLError, HTTPError
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/parse.py b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/parse.py
new file mode 100644
index 000000000..9074b8163
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/parse.py
@@ -0,0 +1,28 @@
+from __future__ import absolute_import
+from future.standard_library import suspend_hooks
+
+from future.utils import PY3
+
+if PY3:
+ from urllib.parse import *
+else:
+ __future_module__ = True
+ from urlparse import (ParseResult, SplitResult, parse_qs, parse_qsl,
+ urldefrag, urljoin, urlparse, urlsplit,
+ urlunparse, urlunsplit)
+
+ # we use this method to get at the original py2 urllib before any renaming
+ # quote = sys.py2_modules['urllib'].quote
+ # quote_plus = sys.py2_modules['urllib'].quote_plus
+ # unquote = sys.py2_modules['urllib'].unquote
+ # unquote_plus = sys.py2_modules['urllib'].unquote_plus
+ # urlencode = sys.py2_modules['urllib'].urlencode
+ # splitquery = sys.py2_modules['urllib'].splitquery
+
+ with suspend_hooks():
+ from urllib import (quote,
+ quote_plus,
+ unquote,
+ unquote_plus,
+ urlencode,
+ splitquery)
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/request.py b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/request.py
new file mode 100644
index 000000000..60e440a77
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/request.py
@@ -0,0 +1,93 @@
+from __future__ import absolute_import
+
+from future.standard_library import suspend_hooks
+from future.utils import PY3
+
+if PY3:
+ from urllib.request import *
+ # These aren't in __all__:
+ from urllib.request import (getproxies,
+ pathname2url,
+ proxy_bypass,
+ quote,
+ request_host,
+ splitattr,
+ splithost,
+ splitpasswd,
+ splitport,
+ splitquery,
+ splittag,
+ splittype,
+ splituser,
+ splitvalue,
+ thishost,
+ to_bytes,
+ unquote,
+ unwrap,
+ url2pathname,
+ urlcleanup,
+ urljoin,
+ urlopen,
+ urlparse,
+ urlretrieve,
+ urlsplit,
+ urlunparse)
+else:
+ __future_module__ = True
+ with suspend_hooks():
+ from urllib import *
+ from urllib2 import *
+ from urlparse import *
+
+ # Rename:
+ from urllib import toBytes # missing from __all__ on Py2.6
+ to_bytes = toBytes
+
+ # from urllib import (pathname2url,
+ # url2pathname,
+ # getproxies,
+ # urlretrieve,
+ # urlcleanup,
+ # URLopener,
+ # FancyURLopener,
+ # proxy_bypass)
+
+ # from urllib2 import (
+ # AbstractBasicAuthHandler,
+ # AbstractDigestAuthHandler,
+ # BaseHandler,
+ # CacheFTPHandler,
+ # FileHandler,
+ # FTPHandler,
+ # HTTPBasicAuthHandler,
+ # HTTPCookieProcessor,
+ # HTTPDefaultErrorHandler,
+ # HTTPDigestAuthHandler,
+ # HTTPErrorProcessor,
+ # HTTPHandler,
+ # HTTPPasswordMgr,
+ # HTTPPasswordMgrWithDefaultRealm,
+ # HTTPRedirectHandler,
+ # HTTPSHandler,
+ # URLError,
+ # build_opener,
+ # install_opener,
+ # OpenerDirector,
+ # ProxyBasicAuthHandler,
+ # ProxyDigestAuthHandler,
+ # ProxyHandler,
+ # Request,
+ # UnknownHandler,
+ # urlopen,
+ # )
+
+ # from urlparse import (
+ # urldefrag
+ # urljoin,
+ # urlparse,
+ # urlunparse,
+ # urlsplit,
+ # urlunsplit,
+ # parse_qs,
+ # parse_qsl,
+ # )
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/response.py b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/response.py
new file mode 100644
index 000000000..a287ae283
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/response.py
@@ -0,0 +1,12 @@
+from future import standard_library
+from future.utils import PY3
+
+if PY3:
+ from urllib.response import *
+else:
+ __future_module__ = True
+ with standard_library.suspend_hooks():
+ from urllib import (addbase,
+ addclosehook,
+ addinfo,
+ addinfourl)
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/robotparser.py b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/robotparser.py
new file mode 100644
index 000000000..0dc8f5715
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/robotparser.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from urllib.robotparser import *
+else:
+ __future_module__ = True
+ from robotparser import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/winreg.py b/.install/.kodi/addons/script.module.future/libs/future/moves/winreg.py
new file mode 100644
index 000000000..c8b147568
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/winreg.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from winreg import *
+else:
+ __future_module__ = True
+ from _winreg import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/client.py b/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/client.py
new file mode 100644
index 000000000..4708cf899
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/client.py
@@ -0,0 +1,7 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from xmlrpc.client import *
+else:
+ from xmlrpclib import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/server.py b/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/server.py
new file mode 100644
index 000000000..1a8af3454
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/server.py
@@ -0,0 +1,7 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from xmlrpc.server import *
+else:
+ from xmlrpclib import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_dummy_thread/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_dummy_thread/__init__.py
new file mode 100644
index 000000000..63dced6e5
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_dummy_thread/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import
+import sys
+__future_module__ = True
+
+if sys.version_info[0] < 3:
+ from dummy_thread import *
+else:
+ raise ImportError('This package should not be accessible on Python 3. '
+ 'Either you are trying to run from the python-future src folder '
+ 'or your installation of python-future is corrupted.')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_markupbase/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_markupbase/__init__.py
new file mode 100644
index 000000000..290906540
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_markupbase/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import
+import sys
+__future_module__ = True
+
+if sys.version_info[0] < 3:
+ from markupbase import *
+else:
+ raise ImportError('This package should not be accessible on Python 3. '
+ 'Either you are trying to run from the python-future src folder '
+ 'or your installation of python-future is corrupted.')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_thread/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_thread/__init__.py
new file mode 100644
index 000000000..9f2a51c75
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_thread/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import
+import sys
+__future_module__ = True
+
+if sys.version_info[0] < 3:
+ from thread import *
+else:
+ raise ImportError('This package should not be accessible on Python 3. '
+ 'Either you are trying to run from the python-future src folder '
+ 'or your installation of python-future is corrupted.')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/copyreg/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/copyreg/__init__.py
new file mode 100644
index 000000000..51bd4b9a7
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/copyreg/__init__.py
@@ -0,0 +1,9 @@
+from __future__ import absolute_import
+import sys
+
+if sys.version_info[0] < 3:
+ from copy_reg import *
+else:
+ raise ImportError('This package should not be accessible on Python 3. '
+ 'Either you are trying to run from the python-future src folder '
+ 'or your installation of python-future is corrupted.')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/__init__.py
new file mode 100644
index 000000000..e957e7457
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/__init__.py
@@ -0,0 +1,9 @@
+from __future__ import absolute_import
+import sys
+
+if sys.version_info[0] < 3:
+ from future.moves.html import *
+else:
+ raise ImportError('This package should not be accessible on Python 3. '
+ 'Either you are trying to run from the python-future src folder '
+ 'or your installation of python-future is corrupted.')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/entities.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/entities.py
new file mode 100644
index 000000000..211649e53
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/entities.py
@@ -0,0 +1,7 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+if PY3:
+ from html.entities import *
+else:
+ from future.moves.html.entities import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/parser.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/parser.py
new file mode 100644
index 000000000..541def391
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/parser.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+import sys
+__future_module__ = True
+
+if sys.version_info[0] == 3:
+ raise ImportError('Cannot import module from python-future source folder')
+else:
+ from future.moves.html.parser import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/__init__.py
new file mode 100644
index 000000000..e4f853e53
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/__init__.py
@@ -0,0 +1,9 @@
+from __future__ import absolute_import
+import sys
+
+if sys.version_info[0] < 3:
+ pass
+else:
+ raise ImportError('This package should not be accessible on Python 3. '
+ 'Either you are trying to run from the python-future src folder '
+ 'or your installation of python-future is corrupted.')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/client.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/client.py
new file mode 100644
index 000000000..7566fe4dc
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/client.py
@@ -0,0 +1,91 @@
+from __future__ import absolute_import
+import sys
+
+assert sys.version_info[0] < 3
+
+from httplib import *
+from httplib import HTTPMessage
+
+# These constants aren't included in __all__ in httplib.py:
+
+from httplib import (HTTP_PORT,
+ HTTPS_PORT,
+
+ _CS_IDLE,
+ _CS_REQ_STARTED,
+ _CS_REQ_SENT,
+
+ CONTINUE,
+ SWITCHING_PROTOCOLS,
+ PROCESSING,
+
+ OK,
+ CREATED,
+ ACCEPTED,
+ NON_AUTHORITATIVE_INFORMATION,
+ NO_CONTENT,
+ RESET_CONTENT,
+ PARTIAL_CONTENT,
+ MULTI_STATUS,
+ IM_USED,
+
+ MULTIPLE_CHOICES,
+ MOVED_PERMANENTLY,
+ FOUND,
+ SEE_OTHER,
+ NOT_MODIFIED,
+ USE_PROXY,
+ TEMPORARY_REDIRECT,
+
+ BAD_REQUEST,
+ UNAUTHORIZED,
+ PAYMENT_REQUIRED,
+ FORBIDDEN,
+ NOT_FOUND,
+ METHOD_NOT_ALLOWED,
+ NOT_ACCEPTABLE,
+ PROXY_AUTHENTICATION_REQUIRED,
+ REQUEST_TIMEOUT,
+ CONFLICT,
+ GONE,
+ LENGTH_REQUIRED,
+ PRECONDITION_FAILED,
+ REQUEST_ENTITY_TOO_LARGE,
+ REQUEST_URI_TOO_LONG,
+ UNSUPPORTED_MEDIA_TYPE,
+ REQUESTED_RANGE_NOT_SATISFIABLE,
+ EXPECTATION_FAILED,
+ UNPROCESSABLE_ENTITY,
+ LOCKED,
+ FAILED_DEPENDENCY,
+ UPGRADE_REQUIRED,
+
+ INTERNAL_SERVER_ERROR,
+ NOT_IMPLEMENTED,
+ BAD_GATEWAY,
+ SERVICE_UNAVAILABLE,
+ GATEWAY_TIMEOUT,
+ HTTP_VERSION_NOT_SUPPORTED,
+ INSUFFICIENT_STORAGE,
+ NOT_EXTENDED,
+
+ MAXAMOUNT,
+ )
+
+# These are not available on Python 2.6.x:
+try:
+ from httplib import LineTooLong, LineAndFileWrapper
+except ImportError:
+ pass
+
+# These may not be available on all versions of Python 2.6.x or 2.7.x
+try:
+ from httplib import (
+ _MAXLINE,
+ _MAXHEADERS,
+ _is_legal_header_name,
+ _is_illegal_header_value,
+ _METHODS_EXPECTING_BODY
+ )
+except ImportError:
+ pass
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/cookiejar.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/cookiejar.py
new file mode 100644
index 000000000..d847b2bf2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/cookiejar.py
@@ -0,0 +1,6 @@
+from __future__ import absolute_import
+import sys
+
+assert sys.version_info[0] < 3
+
+from cookielib import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/cookies.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/cookies.py
new file mode 100644
index 000000000..eb2a82388
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/cookies.py
@@ -0,0 +1,7 @@
+from __future__ import absolute_import
+import sys
+
+assert sys.version_info[0] < 3
+
+from Cookie import *
+from Cookie import Morsel # left out of __all__ on Py2.7!
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/server.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/server.py
new file mode 100644
index 000000000..297105578
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/server.py
@@ -0,0 +1,18 @@
+from __future__ import absolute_import
+import sys
+
+assert sys.version_info[0] < 3
+
+from BaseHTTPServer import *
+from CGIHTTPServer import *
+from SimpleHTTPServer import *
+try:
+ from CGIHTTPServer import _url_collapse_path # needed for a test
+except ImportError:
+ try:
+ # Python 2.7.0 to 2.7.3
+ from CGIHTTPServer import (
+ _url_collapse_path_split as _url_collapse_path)
+ except ImportError:
+ # Doesn't exist on Python 2.6.x. Ignore it.
+ pass
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/queue/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/queue/__init__.py
new file mode 100644
index 000000000..22bd296b6
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/queue/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import
+import sys
+__future_module__ = True
+
+if sys.version_info[0] < 3:
+ from Queue import *
+else:
+ raise ImportError('This package should not be accessible on Python 3. '
+ 'Either you are trying to run from the python-future src folder '
+ 'or your installation of python-future is corrupted.')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/reprlib/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/reprlib/__init__.py
new file mode 100644
index 000000000..6ccf9c006
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/reprlib/__init__.py
@@ -0,0 +1,9 @@
+from __future__ import absolute_import
+import sys
+
+if sys.version_info[0] < 3:
+ from repr import *
+else:
+ raise ImportError('This package should not be accessible on Python 3. '
+ 'Either you are trying to run from the python-future src folder '
+ 'or your installation of python-future is corrupted.')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/socketserver/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/socketserver/__init__.py
new file mode 100644
index 000000000..c5b8c9c28
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/socketserver/__init__.py
@@ -0,0 +1,9 @@
+from __future__ import absolute_import
+import sys
+
+if sys.version_info[0] < 3:
+ from SocketServer import *
+else:
+ raise ImportError('This package should not be accessible on Python 3. '
+ 'Either you are trying to run from the python-future src folder '
+ 'or your installation of python-future is corrupted.')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/winreg/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/winreg/__init__.py
new file mode 100644
index 000000000..97243bbb8
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/winreg/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import
+import sys
+__future_module__ = True
+
+if sys.version_info[0] < 3:
+ from _winreg import *
+else:
+ raise ImportError('This package should not be accessible on Python 3. '
+ 'Either you are trying to run from the python-future src folder '
+ 'or your installation of python-future is corrupted.')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/__init__.py
new file mode 100644
index 000000000..e4f853e53
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/__init__.py
@@ -0,0 +1,9 @@
+from __future__ import absolute_import
+import sys
+
+if sys.version_info[0] < 3:
+ pass
+else:
+ raise ImportError('This package should not be accessible on Python 3. '
+ 'Either you are trying to run from the python-future src folder '
+ 'or your installation of python-future is corrupted.')
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/client.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/client.py
new file mode 100644
index 000000000..a8d0827e9
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/client.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+import sys
+
+assert sys.version_info[0] < 3
+from xmlrpclib import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/server.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/server.py
new file mode 100644
index 000000000..a8d0827e9
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/server.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+import sys
+
+assert sys.version_info[0] < 3
+from xmlrpclib import *
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/__init__.py
new file mode 100644
index 000000000..e64568f2f
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/__init__.py
@@ -0,0 +1,826 @@
+"""
+Python 3 reorganized the standard library (PEP 3108). This module exposes
+several standard library modules to Python 2 under their new Python 3
+names.
+
+It is designed to be used as follows::
+
+ from future import standard_library
+ standard_library.install_aliases()
+
+And then these normal Py3 imports work on both Py3 and Py2::
+
+ import builtins
+ import copyreg
+ import queue
+ import reprlib
+ import socketserver
+ import winreg # on Windows only
+ import test.support
+    import html, html.parser, html.entities
+ import http, http.client, http.server
+ import http.cookies, http.cookiejar
+ import urllib.parse, urllib.request, urllib.response, urllib.error, urllib.robotparser
+ import xmlrpc.client, xmlrpc.server
+
+ import _thread
+ import _dummy_thread
+ import _markupbase
+
+ from itertools import filterfalse, zip_longest
+ from sys import intern
+ from collections import UserDict, UserList, UserString
+ from collections import OrderedDict, Counter, ChainMap # even on Py2.6
+ from subprocess import getoutput, getstatusoutput
+ from subprocess import check_output # even on Py2.6
+
+(The renamed modules and functions are still available under their old
+names on Python 2.)
+
+This is a cleaner alternative to this idiom (see
+http://docs.pythonsprints.com/python3_porting/py-porting.html)::
+
+ try:
+ import queue
+ except ImportError:
+ import Queue as queue
+
+
+Limitations
+-----------
+We don't currently support these modules, but would like to::
+
+ import dbm
+ import dbm.dumb
+ import dbm.gnu
+ import collections.abc # on Py33
+ import pickle # should (optionally) bring in cPickle on Python 2
+
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import sys
+import logging
+import imp
+import contextlib
+import types
+import copy
+import os
+
+# Make a dedicated logger; leave the root logger to be configured
+# by the application. All diagnostic output from this module goes
+# through ``flog``.
+flog = logging.getLogger('future_stdlib')
+_formatter = logging.Formatter(logging.BASIC_FORMAT)
+_handler = logging.StreamHandler()
+_handler.setFormatter(_formatter)
+flog.addHandler(_handler)
+flog.setLevel(logging.WARN)
+
+from future.utils import PY2, PY3
+
+# Added by Roman V.M. for using in Kodi
+# Extends sys.path so the bundled Py2 backport modules under
+# libs/future/standard_library/Lib (xmlrpc etc.) become importable.
+if PY2:
+    from xbmcaddon import Addon
+    # getAddonInfo('path') is presumably a byte string on Py2 Kodi,
+    # hence the explicit decode -- TODO confirm against the Kodi API.
+    sys.path.append(os.path.join(
+        Addon('script.module.future').getAddonInfo('path').decode('utf-8'),
+        'libs',
+        'future',
+        'standard_library',
+        'Lib')
+    )
+
+# The modules that are defined under the same names on Py3 but with
+# different contents in a significant way (e.g. submodules) are:
+#     pickle (fast one)
+#     dbm
+#     urllib
+#     test
+#     email
+
+# These names are deliberately NOT auto-aliased by the import hooks (see
+# the assert below RENAMES); they require explicit imports.
+REPLACED_MODULES = set(['test', 'urllib', 'pickle', 'dbm'])  # add email and dbm when we support it
+
+# The following module names are not present in Python 2.x, so they cause no
+# potential clashes between the old and new names:
+#     http
+#     html
+#     tkinter
+#     xmlrpc
+# Keys: Py2 / real module names
+# Values: Py3 / simulated module names
+# Consumed by RenameImport (installed via install_hooks()).
+RENAMES = {
+    # 'cStringIO': 'io',  # there's a new io module in Python 2.6
+    #                       that provides StringIO and BytesIO
+    # 'StringIO': 'io',  # ditto
+    # 'cPickle': 'pickle',
+    '__builtin__': 'builtins',
+    'copy_reg': 'copyreg',
+    'Queue': 'queue',
+    'future.moves.socketserver': 'socketserver',
+    'ConfigParser': 'configparser',
+    'repr': 'reprlib',
+    # 'FileDialog': 'tkinter.filedialog',
+    # 'tkFileDialog': 'tkinter.filedialog',
+    # 'SimpleDialog': 'tkinter.simpledialog',
+    # 'tkSimpleDialog': 'tkinter.simpledialog',
+    # 'tkColorChooser': 'tkinter.colorchooser',
+    # 'tkCommonDialog': 'tkinter.commondialog',
+    # 'Dialog': 'tkinter.dialog',
+    # 'Tkdnd': 'tkinter.dnd',
+    # 'tkFont': 'tkinter.font',
+    # 'tkMessageBox': 'tkinter.messagebox',
+    # 'ScrolledText': 'tkinter.scrolledtext',
+    # 'Tkconstants': 'tkinter.constants',
+    # 'Tix': 'tkinter.tix',
+    # 'ttk': 'tkinter.ttk',
+    # 'Tkinter': 'tkinter',
+    '_winreg': 'winreg',
+    'thread': '_thread',
+    'dummy_thread': '_dummy_thread',
+    # 'anydbm': 'dbm',   # causes infinite import loop
+    # 'whichdb': 'dbm',  # causes infinite import loop
+    # anydbm and whichdb are handled by fix_imports2
+    # 'dbhash': 'dbm.bsd',
+    # 'dumbdbm': 'dbm.dumb',
+    # 'dbm': 'dbm.ndbm',
+    # 'gdbm': 'dbm.gnu',
+    'future.moves.xmlrpc': 'xmlrpc',
+    # 'future.backports.email': 'email',  # for use by urllib
+    # 'DocXMLRPCServer': 'xmlrpc.server',
+    # 'SimpleXMLRPCServer': 'xmlrpc.server',
+    # 'httplib': 'http.client',
+    # 'htmlentitydefs' : 'html.entities',
+    # 'HTMLParser' : 'html.parser',
+    # 'Cookie': 'http.cookies',
+    # 'cookielib': 'http.cookiejar',
+    # 'BaseHTTPServer': 'http.server',
+    # 'SimpleHTTPServer': 'http.server',
+    # 'CGIHTTPServer': 'http.server',
+    # 'future.backports.test': 'test',  # primarily for renaming test_support to support
+    # 'commands': 'subprocess',
+    # 'urlparse' : 'urllib.parse',
+    # 'robotparser' : 'urllib.robotparser',
+    # 'abc': 'collections.abc',  # for Py33
+    # 'future.utils.six.moves.html': 'html',
+    # 'future.utils.six.moves.http': 'http',
+    'future.moves.html': 'html',
+    'future.moves.http': 'http',
+    # 'future.backports.urllib': 'urllib',
+    # 'future.utils.six.moves.urllib': 'urllib',
+    'future.moves._markupbase': '_markupbase',
+    }
+
+
+# It is complicated and apparently brittle to mess around with the
+# ``sys.modules`` cache in order to support "import urllib" meaning two
+# different things (Py2.7 urllib and backported Py3.3-like urllib) in different
+# contexts. So we require explicit imports for these modules.
+assert len(set(RENAMES.values()) & set(REPLACED_MODULES)) == 0
+
+
+# Harmless renames that we can insert.
+# These modules need names from elsewhere being added to them:
+#     subprocess: should provide getoutput and other fns from commands
+#                 module but these fns are missing: getstatus, mk2arg,
+#                 mkarg
+#     re:         needs an ASCII constant that works compatibly with Py3
+
+# etc: see lib2to3/fixes/fix_imports.py
+
+# (New module name, new object name, old module name, old object name)
+# Consumed by install_aliases(), which copies each old object onto the
+# new module under the new name.
+MOVES = [('collections', 'UserList', 'UserList', 'UserList'),
+         ('collections', 'UserDict', 'UserDict', 'UserDict'),
+         ('collections', 'UserString','UserString', 'UserString'),
+         ('collections', 'ChainMap', 'future.backports.misc', 'ChainMap'),
+         ('itertools', 'filterfalse','itertools', 'ifilterfalse'),
+         ('itertools', 'zip_longest','itertools', 'izip_longest'),
+         ('sys', 'intern','__builtin__', 'intern'),
+         # The re module has no ASCII flag in Py2, but this is the default.
+         # Set re.ASCII to a zero constant. stat.ST_MODE just happens to be one
+         # (and it exists on Py2.6+).
+         ('re', 'ASCII','stat', 'ST_MODE'),
+         ('base64', 'encodebytes','base64', 'encodestring'),
+         ('base64', 'decodebytes','base64', 'decodestring'),
+         ('subprocess', 'getoutput', 'commands', 'getoutput'),
+         ('subprocess', 'getstatusoutput', 'commands', 'getstatusoutput'),
+         ('subprocess', 'check_output', 'future.backports.misc', 'check_output'),
+         ('math', 'ceil', 'future.backports.misc', 'ceil'),
+         ('collections', 'OrderedDict', 'future.backports.misc', 'OrderedDict'),
+         ('collections', 'Counter', 'future.backports.misc', 'Counter'),
+         ('collections', 'ChainMap', 'future.backports.misc', 'ChainMap'),
+         ('itertools', 'count', 'future.backports.misc', 'count'),
+         ('reprlib', 'recursive_repr', 'future.backports.misc', 'recursive_repr'),
+         ('functools', 'cmp_to_key', 'future.backports.misc', 'cmp_to_key'),
+
+# This is no use, since "import urllib.request" etc. still fails:
+#          ('urllib', 'error', 'future.moves.urllib', 'error'),
+#          ('urllib', 'parse', 'future.moves.urllib', 'parse'),
+#          ('urllib', 'request', 'future.moves.urllib', 'request'),
+#          ('urllib', 'response', 'future.moves.urllib', 'response'),
+#          ('urllib', 'robotparser', 'future.moves.urllib', 'robotparser'),
+         ]
+
+
+# A minimal example of an import hook:
+# class WarnOnImport(object):
+# def __init__(self, *args):
+# self.module_names = args
+#
+# def find_module(self, fullname, path=None):
+# if fullname in self.module_names:
+# self.path = path
+# return self
+# return None
+#
+# def load_module(self, name):
+# if name in sys.modules:
+# return sys.modules[name]
+# module_info = imp.find_module(name, self.path)
+# module = imp.load_module(name, *module_info)
+# sys.modules[name] = module
+# flog.warning("Imported deprecated module %s", name)
+# return module
+
+
+class RenameImport(object):
+    """
+    A class for import hooks mapping Py3 module names etc. to the Py2 equivalents.
+
+    Implements the PEP 302 finder/loader protocol (find_module/load_module)
+    for insertion into sys.meta_path.
+    """
+    # Different RenameImport classes are created when importing this module from
+    # different source files. This causes isinstance(hook, RenameImport) checks
+    # to produce inconsistent results. We add this RENAMER attribute here so
+    # remove_hooks() and install_hooks() can find instances of these classes
+    # easily:
+    RENAMER = True
+
+    def __init__(self, old_to_new):
+        '''
+        Pass in a dictionary-like object mapping from old names to new
+        names. E.g. {'ConfigParser': 'configparser', 'cPickle': 'pickle'}
+        '''
+        self.old_to_new = old_to_new
+        both = set(old_to_new.keys()) & set(old_to_new.values())
+        # The mapping must be unambiguous: no name may appear both as an
+        # old and a new name, and no new name may be duplicated, otherwise
+        # the inverse mapping below would be ill-defined.
+        assert (len(both) == 0 and
+                len(set(old_to_new.values())) == len(old_to_new.values())), \
+            'Ambiguity in renaming (handler not implemented)'
+        self.new_to_old = dict((new, old) for (old, new) in old_to_new.items())
+
+    def find_module(self, fullname, path=None):
+        # PEP 302 finder. Handles hierarchical importing: package.module.module2
+        new_base_names = set([s.split('.')[0] for s in self.new_to_old])
+        # Before v0.12: Was: if fullname in set(self.old_to_new) | new_base_names:
+        if fullname in new_base_names:
+            return self
+        return None
+
+    def load_module(self, name):
+        # PEP 302 loader: import under the old (Py2) name, then cache the
+        # module in sys.modules under the requested (Py3) name.
+        path = None
+        if name in sys.modules:
+            return sys.modules[name]
+        elif name in self.new_to_old:
+            # New name. Look up the corresponding old (Py2) name:
+            oldname = self.new_to_old[name]
+            module = self._find_and_load_module(oldname)
+            # module.__future_module__ = True
+        else:
+            module = self._find_and_load_module(name)
+        # In any case, make it available under the requested (Py3) name
+        sys.modules[name] = module
+        return module
+
+    def _find_and_load_module(self, name, path=None):
+        """
+        Finds and loads it. But if there's a . in the name, handles it
+        properly.
+        """
+        bits = name.split('.')
+        while len(bits) > 1:
+            # Treat the first bit as a package
+            packagename = bits.pop(0)
+            package = self._find_and_load_module(packagename, path)
+            try:
+                path = package.__path__
+            except AttributeError:
+                # This could be e.g. moves.
+                flog.debug('Package {0} has no __path__.'.format(package))
+                if name in sys.modules:
+                    return sys.modules[name]
+                flog.debug('What to do here?')
+
+        name = bits[0]
+        # NOTE: relies on the ``imp`` module (deprecated in Py3 but fine
+        # here -- this code path only runs on Py2).
+        module_info = imp.find_module(name, path)
+        return imp.load_module(name, *module_info)
+
+
+class hooks(object):
+    """
+    Acts as a context manager. Saves the state of sys.modules and restores it
+    after the 'with' block.
+
+    Use like this:
+
+    >>> from future import standard_library
+    >>> with standard_library.hooks():
+    ...     import http.client
+    >>> import requests
+
+    For this to work, http.client will be scrubbed from sys.modules after the
+    'with' block. That way the modules imported in the 'with' block will
+    continue to be accessible in the current namespace but not from any
+    imported modules (like requests).
+    """
+    def __enter__(self):
+        # flog.debug('Entering hooks context manager')
+        # NOTE(review): old_sys_modules is saved but never restored on exit
+        # (the scrub/restore calls below are commented out) -- confirm the
+        # docstring's scrubbing claim still holds.
+        self.old_sys_modules = copy.copy(sys.modules)
+        self.hooks_were_installed = detect_hooks()
+        # self.scrubbed = scrub_py2_sys_modules()
+        install_hooks()
+        return self
+
+    def __exit__(self, *args):
+        # flog.debug('Exiting hooks context manager')
+        # restore_sys_modules(self.scrubbed)
+        # Only uninstall the hooks if we installed them ourselves:
+        if not self.hooks_were_installed:
+            remove_hooks()
+        # scrub_future_sys_modules()
+
+# Sanity check for is_py2_stdlib_module(): We aren't replacing any
+# builtin module names:
+if PY2:
+    assert len(set(RENAMES.values()) & set(sys.builtin_module_names)) == 0
+
+
+def is_py2_stdlib_module(m):
+    """
+    Tries to infer whether the module m is from the Python 2 standard library.
+    This may not be reliable on all systems.
+
+    Returns True if m is a builtin module or lives under the inferred
+    stdlib path (excluding site-packages); always False on Py3.
+    """
+    if PY3:
+        return False
+    # Compute the stdlib path once and memoize it as a function attribute:
+    if not 'stdlib_path' in is_py2_stdlib_module.__dict__:
+        stdlib_files = [contextlib.__file__, os.__file__, copy.__file__]
+        stdlib_paths = [os.path.split(f)[0] for f in stdlib_files]
+        if not len(set(stdlib_paths)) == 1:
+            # This seems to happen on travis-ci.org. Very strange. We'll try to
+            # ignore it.
+            flog.warn('Multiple locations found for the Python standard '
+                      'library: %s' % stdlib_paths)
+        # Choose the first one arbitrarily
+        is_py2_stdlib_module.stdlib_path = stdlib_paths[0]
+
+    if m.__name__ in sys.builtin_module_names:
+        return True
+
+    if hasattr(m, '__file__'):
+        modpath = os.path.split(m.__file__)
+        if (modpath[0].startswith(is_py2_stdlib_module.stdlib_path) and
+                'site-packages' not in modpath[0]):
+            return True
+
+    return False
+
+
+def scrub_py2_sys_modules():
+ """
+ Removes any Python 2 standard library modules from ``sys.modules`` that
+ would interfere with Py3-style imports using import hooks. Examples are
+ modules with the same names (like urllib or email).
+
+ (Note that currently import hooks are disabled for modules like these
+ with ambiguous names anyway ...)
+ """
+ if PY3:
+ return {}
+ scrubbed = {}
+ for modulename in REPLACED_MODULES & set(RENAMES.keys()):
+ if not modulename in sys.modules:
+ continue
+
+ module = sys.modules[modulename]
+
+ if is_py2_stdlib_module(module):
+ flog.debug('Deleting (Py2) {} from sys.modules'.format(modulename))
+ scrubbed[modulename] = sys.modules[modulename]
+ del sys.modules[modulename]
+ return scrubbed
+
+
+def scrub_future_sys_modules():
+    """
+    Deprecated. Kept for backward compatibility; removes nothing and
+    always returns an empty dict.
+    """
+    return {}
+
+class suspend_hooks(object):
+    """
+    Acts as a context manager. Use like this:
+
+    >>> from future import standard_library
+    >>> standard_library.install_hooks()
+    >>> import http.client
+    >>> # ...
+    >>> with standard_library.suspend_hooks():
+    >>>     import requests     # incompatible with ``future``'s standard library hooks
+
+    If the hooks were disabled before the context, they are not installed when
+    the context is left.
+    """
+    def __enter__(self):
+        # Remember whether the hooks were active so __exit__ restores the
+        # exact prior state:
+        self.hooks_were_installed = detect_hooks()
+        remove_hooks()
+        # self.scrubbed = scrub_future_sys_modules()
+        return self
+
+    def __exit__(self, *args):
+        if self.hooks_were_installed:
+            install_hooks()
+        # restore_sys_modules(self.scrubbed)
+
+
+def restore_sys_modules(scrubbed):
+    """
+    Add any previously scrubbed modules back to the sys.modules cache,
+    but only if it's safe to do so.
+
+    ``scrubbed`` is a dict of {module_name: module} as returned by
+    scrub_py2_sys_modules(). Raises ImportError if any scrubbed name has
+    since reappeared in sys.modules (restoring would clobber it).
+    """
+    clash = set(sys.modules) & set(scrubbed)
+    if len(clash) != 0:
+        # If several, choose one arbitrarily to raise an exception about
+        first = list(clash)[0]
+        raise ImportError('future module {} clashes with Py2 module'
+                          .format(first))
+    sys.modules.update(scrubbed)
+
+
+def install_aliases():
+    """
+    Monkey-patches the standard library in Py2.6/7 to provide
+    aliases for better Py3 compatibility.
+
+    No-op on Python 3. Applies every entry in MOVES, then patches the
+    urllib, test and dbm modules so they expose the Py3 package layout.
+    """
+    if PY3:
+        return
+    # if hasattr(install_aliases, 'run_already'):
+    #     return
+    for (newmodname, newobjname, oldmodname, oldobjname) in MOVES:
+        __import__(newmodname)
+        # We look up the module in sys.modules because __import__ just returns the
+        # top-level package:
+        newmod = sys.modules[newmodname]
+        # newmod.__future_module__ = True
+
+        __import__(oldmodname)
+        oldmod = sys.modules[oldmodname]
+
+        obj = getattr(oldmod, oldobjname)
+        setattr(newmod, newobjname, obj)
+
+    # Hack for urllib so it appears to have the same structure on Py2 as on Py3
+    import urllib
+    from future.backports.urllib import request
+    from future.backports.urllib import response
+    from future.backports.urllib import parse
+    from future.backports.urllib import error
+    from future.backports.urllib import robotparser
+    urllib.request = request
+    urllib.response = response
+    urllib.parse = parse
+    urllib.error = error
+    urllib.robotparser = robotparser
+    # Register the submodules directly so "import urllib.request" works:
+    sys.modules['urllib.request'] = request
+    sys.modules['urllib.response'] = response
+    sys.modules['urllib.parse'] = parse
+    sys.modules['urllib.error'] = error
+    sys.modules['urllib.robotparser'] = robotparser
+
+    # Patch the test module so it appears to have the same structure on Py2 as on Py3
+    try:
+        import test
+    except ImportError:
+        pass
+    try:
+        from future.moves.test import support
+    except ImportError:
+        pass
+    else:
+        test.support = support
+        sys.modules['test.support'] = support
+
+    # Patch the dbm module so it appears to have the same structure on Py2 as on Py3
+    try:
+        import dbm
+    except ImportError:
+        pass
+    else:
+        from future.moves.dbm import dumb
+        dbm.dumb = dumb
+        sys.modules['dbm.dumb'] = dumb
+        try:
+            from future.moves.dbm import gnu
+        except ImportError:
+            pass
+        else:
+            dbm.gnu = gnu
+            sys.modules['dbm.gnu'] = gnu
+        try:
+            from future.moves.dbm import ndbm
+        except ImportError:
+            pass
+        else:
+            dbm.ndbm = ndbm
+            sys.modules['dbm.ndbm'] = ndbm
+
+    # install_aliases.run_already = True
+
+
+def install_hooks():
+    """
+    This function installs the future.standard_library import hook into
+    sys.meta_path. No-op on Python 3; also runs install_aliases() first.
+    """
+    if PY3:
+        return
+
+    install_aliases()
+
+    flog.debug('sys.meta_path was: {0}'.format(sys.meta_path))
+    flog.debug('Installing hooks ...')
+
+    # Add it unless it's there already
+    newhook = RenameImport(RENAMES)
+    if not detect_hooks():
+        sys.meta_path.append(newhook)
+    flog.debug('sys.meta_path is now: {0}'.format(sys.meta_path))
+
+
+def enable_hooks():
+    """
+    Deprecated. Use install_hooks() instead. This will be removed by
+    ``future`` v1.0.
+    """
+    install_hooks()
+
+
+def remove_hooks(scrub_sys_modules=False):
+    """
+    This function removes the import hook from sys.meta_path.
+
+    Hooks are recognised by their RENAMER class attribute (see
+    RenameImport), not by isinstance, so hooks created from re-imported
+    copies of this module are also removed. No-op on Python 3.
+    """
+    if PY3:
+        return
+    flog.debug('Uninstalling hooks ...')
+    # Loop backwards, so deleting items keeps the ordering:
+    for i, hook in list(enumerate(sys.meta_path))[::-1]:
+        if hasattr(hook, 'RENAMER'):
+            del sys.meta_path[i]
+
+    # Explicit is better than implicit. In the future the interface should
+    # probably change so that scrubbing the import hooks requires a separate
+    # function call. Left as is for now for backward compatibility with
+    # v0.11.x.
+    if scrub_sys_modules:
+        scrub_future_sys_modules()
+
+
+def disable_hooks():
+    """
+    Deprecated. Use remove_hooks() instead. This will be removed by
+    ``future`` v1.0.
+    """
+    remove_hooks()
+
+
+def detect_hooks():
+ """
+ Returns True if the import hooks are installed, False if not.
+ """
+ flog.debug('Detecting hooks ...')
+ present = any([hasattr(hook, 'RENAMER') for hook in sys.meta_path])
+ if present:
+ flog.debug('Detected.')
+ else:
+ flog.debug('Not detected.')
+ return present
+
+
+# As of v0.12, this no longer happens implicitly:
+# if not PY3:
+# install_hooks()
+
+
+# Interpreter-wide cache of original Py2 modules (see cache_py2_modules).
+if not hasattr(sys, 'py2_modules'):
+    sys.py2_modules = {}
+
+def cache_py2_modules():
+    """
+    Currently this function is unneeded, as we are not attempting to provide import hooks
+    for modules with ambiguous names: email, urllib, pickle.
+
+    Stores references to the genuine Py2 modules in sys.py2_modules; must
+    be called before any hooks are installed (asserted below).
+    """
+    if len(sys.py2_modules) != 0:
+        return
+    assert not detect_hooks()
+    import urllib
+    sys.py2_modules['urllib'] = urllib
+
+    import email
+    sys.py2_modules['email'] = email
+
+    import pickle
+    sys.py2_modules['pickle'] = pickle
+
+    # Not all Python installations have test module. (Anaconda doesn't, for example.)
+    # try:
+    #     import test
+    # except ImportError:
+    #     sys.py2_modules['test'] = None
+    # sys.py2_modules['test'] = test
+
+    # import dbm
+    # sys.py2_modules['dbm'] = dbm
+
+
+def import_(module_name, backport=False):
+    """
+    Pass a (potentially dotted) module name of a Python 3 standard library
+    module. This function imports the module compatibly on Py2 and Py3 and
+    returns the top-level module.
+
+    Example use:
+        >>> http = import_('http.client')
+        >>> http = import_('http.server')
+        >>> urllib = import_('urllib.request')
+
+    Then:
+        >>> conn = http.client.HTTPConnection(...)
+        >>> response = urllib.request.urlopen('http://mywebsite.com')
+        >>> # etc.
+
+    Use as follows:
+        >>> package_name = import_(module_name)
+
+    On Py3, equivalent to this:
+
+        >>> import module_name
+
+    On Py2, equivalent to this if backport=False:
+
+        >>> from future.moves import module_name
+
+    or to this if backport=True:
+
+        >>> from future.backports import module_name
+
+    except that it also handles dotted module names such as ``http.client``
+    The effect then is like this:
+
+        >>> from future.backports import module
+        >>> from future.backports.module import submodule
+        >>> module.submodule = submodule
+
+    Note that this would be a SyntaxError in Python:
+
+        >>> from future.backports import http.client
+
+    """
+    # Python 2.6 doesn't have importlib in the stdlib, so it requires
+    # the backported ``importlib`` package from PyPI as a dependency to use
+    # this function:
+    import importlib
+
+    if PY3:
+        return __import__(module_name)
+    else:
+        # client.blah = blah
+        # Then http.client = client
+        # etc.
+        if backport:
+            prefix = 'future.backports'
+        else:
+            prefix = 'future.moves'
+        parts = prefix.split('.') + module_name.split('.')
+
+        # Import every prefix of the dotted path, then stitch each module
+        # onto its parent as an attribute (child to parent, skipping the
+        # root package):
+        modules = []
+        for i, part in enumerate(parts):
+            sofar = '.'.join(parts[:i+1])
+            modules.append(importlib.import_module(sofar))
+        for i, part in reversed(list(enumerate(parts))):
+            if i == 0:
+                break
+            setattr(modules[i-1], part, modules[i])
+
+        # Return the next-most top-level module after future.backports / future.moves:
+        return modules[2]
+
+
+def from_import(module_name, *symbol_names, **kwargs):
+ """
+ Example use:
+ >>> HTTPConnection = from_import('http.client', 'HTTPConnection')
+ >>> HTTPServer = from_import('http.server', 'HTTPServer')
+ >>> urlopen, urlparse = from_import('urllib.request', 'urlopen', 'urlparse')
+
+ Equivalent to this on Py3:
+
+ >>> from module_name import symbol_names[0], symbol_names[1], ...
+
+ and this on Py2:
+
+ >>> from future.moves.module_name import symbol_names[0], ...
+
+ or:
+
+ >>> from future.backports.module_name import symbol_names[0], ...
+
+ except that it also handles dotted module names such as ``http.client``.
+ """
+
+ if PY3:
+ return __import__(module_name)
+ else:
+ if 'backport' in kwargs and bool(kwargs['backport']):
+ prefix = 'future.backports'
+ else:
+ prefix = 'future.moves'
+ parts = prefix.split('.') + module_name.split('.')
+ module = importlib.import_module(prefix + '.' + module_name)
+ output = [getattr(module, name) for name in symbol_names]
+ if len(output) == 1:
+ return output[0]
+ else:
+ return output
+
+
+class exclude_local_folder_imports(object):
+    """
+    A context-manager that prevents standard library modules like configparser
+    from being imported from the local python-future source folder on Py3.
+
+    (This was needed prior to v0.16.0 because the presence of a configparser
+    folder would otherwise have prevented setuptools from running on Py3. Maybe
+    it's not needed any more?)
+    """
+    def __init__(self, *args):
+        assert len(args) > 0
+        self.module_names = args
+        # Disallow dotted module names like http.client:
+        if any(['.' in m for m in self.module_names]):
+            raise NotImplementedError('Dotted module names are not supported')
+
+    def __enter__(self):
+        # Snapshot interpreter state so __exit__ can undo our changes:
+        self.old_sys_path = copy.copy(sys.path)
+        self.old_sys_modules = copy.copy(sys.modules)
+        if sys.version_info[0] < 3:
+            return
+        # The presence of all these indicates we've found our source folder,
+        # because `builtins` won't have been installed in site-packages by setup.py:
+        FUTURE_SOURCE_SUBFOLDERS = ['future', 'past', 'libfuturize', 'libpasteurize', 'builtins']
+
+        # Look for the future source folder:
+        for folder in self.old_sys_path:
+            if all([os.path.exists(os.path.join(folder, subfolder))
+                    for subfolder in FUTURE_SOURCE_SUBFOLDERS]):
+                # Found it. Remove it.
+                sys.path.remove(folder)
+
+        # Ensure we import the system module:
+        for m in self.module_names:
+            # Delete the module and any submodules from sys.modules:
+            # for key in list(sys.modules):
+            #     if key == m or key.startswith(m + '.'):
+            #         try:
+            #             del sys.modules[key]
+            #         except KeyError:
+            #             pass
+            try:
+                # Deliberately discards the result; the point is the side
+                # effect of populating sys.modules from the system path.
+                module = __import__(m, level=0)
+            except ImportError:
+                # There's a problem importing the system module. E.g. the
+                # winreg module is not available except on Windows.
+                pass
+
+    def __exit__(self, *args):
+        # Restore sys.path and sys.modules:
+        sys.path = self.old_sys_path
+        # Only re-add entries that vanished; modules imported inside the
+        # 'with' block are kept.
+        for m in set(self.old_sys_modules.keys()) - set(sys.modules.keys()):
+            sys.modules[m] = self.old_sys_modules[m]
+
+# Py3-style top-level module names pre-imported by
+# import_top_level_modules(); platform-specific ones (e.g. winreg) are
+# skipped there when unavailable.
+TOP_LEVEL_MODULES = ['builtins',
+                     'copyreg',
+                     'html',
+                     'http',
+                     'queue',
+                     'reprlib',
+                     'socketserver',
+                     'test',
+                     'tkinter',
+                     'winreg',
+                     'xmlrpc',
+                     '_dummy_thread',
+                     '_markupbase',
+                     '_thread',
+                     ]
+
+def import_top_level_modules():
+    """
+    Import every module in TOP_LEVEL_MODULES from the system installation
+    (never from a local python-future source folder), silently skipping
+    platform-specific modules that are unavailable.
+    """
+    with exclude_local_folder_imports(*TOP_LEVEL_MODULES):
+        for m in TOP_LEVEL_MODULES:
+            try:
+                __import__(m)
+            except ImportError:   # e.g. winreg
+                pass
diff --git a/.install/.kodi/addons/script.module.future/libs/future/tests/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/.install/.kodi/addons/script.module.future/libs/future/tests/base.py b/.install/.kodi/addons/script.module.future/libs/future/tests/base.py
new file mode 100644
index 000000000..9f4607b69
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/tests/base.py
@@ -0,0 +1,531 @@
+from __future__ import print_function, absolute_import
+import os
+import tempfile
+import unittest
+import sys
+import re
+import warnings
+import io
+from textwrap import dedent
+
+from future.utils import bind_method, PY26, PY3, PY2, PY27
+from future.moves.subprocess import check_output, STDOUT, CalledProcessError
+
+if PY26:
+ import unittest2 as unittest
+
+
+def reformat_code(code):
+    """
+    Removes any single leading newline and dedents the code block.
+    """
+    if code.startswith('\n'):
+        code = code[1:]
+    return dedent(code)
+
+
+def order_future_lines(code):
+    """
+    Returns the code block with any ``__future__`` import lines sorted, and
+    then any ``future`` import lines sorted, then any ``builtins`` import lines
+    sorted.
+
+    This only sorts the lines within the expected blocks.
+
+    See test_order_future_lines() for an example.
+    """
+
+    # We need .splitlines(keepends=True), which doesn't exist on Py2,
+    # so we use this instead:
+    lines = code.split('\n')
+
+    # Indices of the three categories of import line:
+    uufuture_line_numbers = [i for i, line in enumerate(lines)
+                             if line.startswith('from __future__ import ')]
+
+    future_line_numbers = [i for i, line in enumerate(lines)
+                           if line.startswith('from future')
+                           or line.startswith('from past')]
+
+    builtins_line_numbers = [i for i, line in enumerate(lines)
+                             if line.startswith('from builtins')]
+
+    assert code.lstrip() == code, ('internal usage error: '
+                                   'dedent the code before calling order_future_lines()')
+
+    def mymax(numbers):
+        return max(numbers) if len(numbers) > 0 else 0
+
+    def mymin(numbers):
+        return min(numbers) if len(numbers) > 0 else float('inf')
+
+    # The blocks must not interleave for positional re-insertion to be valid:
+    assert mymax(uufuture_line_numbers) <= mymin(future_line_numbers), \
+        'the __future__ and future imports are out of order'
+
+    # assert mymax(future_line_numbers) <= mymin(builtins_line_numbers), \
+    #     'the future and builtins imports are out of order'
+
+    # Map each original index to the sorted line for its category:
+    uul = sorted([lines[i] for i in uufuture_line_numbers])
+    sorted_uufuture_lines = dict(zip(uufuture_line_numbers, uul))
+
+    fl = sorted([lines[i] for i in future_line_numbers])
+    sorted_future_lines = dict(zip(future_line_numbers, fl))
+
+    bl = sorted([lines[i] for i in builtins_line_numbers])
+    sorted_builtins_lines = dict(zip(builtins_line_numbers, bl))
+
+    # Replace the old unsorted "from __future__ import ..." lines with the
+    # new sorted ones:
+    new_lines = []
+    for i in range(len(lines)):
+        if i in uufuture_line_numbers:
+            new_lines.append(sorted_uufuture_lines[i])
+        elif i in future_line_numbers:
+            new_lines.append(sorted_future_lines[i])
+        elif i in builtins_line_numbers:
+            new_lines.append(sorted_builtins_lines[i])
+        else:
+            new_lines.append(lines[i])
+    return '\n'.join(new_lines)
+
+
+class VerboseCalledProcessError(CalledProcessError):
+    """
+    Like CalledProcessError, but it displays more information (message and
+    script output) for diagnosing test failures etc.
+    """
+    def __init__(self, msg, returncode, cmd, output=None):
+        # Deliberately skips CalledProcessError.__init__ and sets the same
+        # attributes (returncode, cmd, output) directly, plus msg.
+        self.msg = msg
+        self.returncode = returncode
+        self.cmd = cmd
+        self.output = output
+
+    def __str__(self):
+        return ("Command '%s' failed with exit status %d\nMessage: %s\nOutput: %s"
+                % (self.cmd, self.returncode, self.msg, self.output))
+
+class FuturizeError(VerboseCalledProcessError):
+    """Raised when the ``futurize`` script fails on a test code block."""
+    pass
+
+class PasteurizeError(VerboseCalledProcessError):
+    """Raised when the ``pasteurize`` script fails on a test code block."""
+    pass
+
+
+class CodeHandler(unittest.TestCase):
+ """
+ Handy mixin for test classes for writing / reading / futurizing /
+ running .py files in the test suite.
+ """
+    def setUp(self):
+        """
+        The outputs from the various futurize stages should have the
+        following headers:
+        """
+        # After stage1:
+        # TODO: use this form after implementing a fixer to consolidate
+        #       __future__ imports into a single line:
+        # self.headers1 = """
+        # from __future__ import absolute_import, division, print_function
+        # """
+        self.headers1 = reformat_code("""
+        from __future__ import absolute_import
+        from __future__ import division
+        from __future__ import print_function
+        """)
+
+        # After stage2 --all-imports:
+        # TODO: use this form after implementing a fixer to consolidate
+        #       __future__ imports into a single line:
+        # self.headers2 = """
+        # from __future__ import (absolute_import, division,
+        #                         print_function, unicode_literals)
+        # from future import standard_library
+        # from future.builtins import *
+        # """
+        self.headers2 = reformat_code("""
+        from __future__ import absolute_import
+        from __future__ import division
+        from __future__ import print_function
+        from __future__ import unicode_literals
+        from future import standard_library
+        standard_library.install_aliases()
+        from builtins import *
+        """)
+        self.interpreters = [sys.executable]
+        self.tempdir = tempfile.mkdtemp() + os.path.sep
+        pypath = os.getenv('PYTHONPATH')
+        # Ensure the futurize/pasteurize subprocesses can import the package
+        # under test from the current working directory:
+        if pypath:
+            self.env = {'PYTHONPATH': os.getcwd() + os.pathsep + pypath}
+        else:
+            self.env = {'PYTHONPATH': os.getcwd()}
+
+    def convert(self, code, stages=(1, 2), all_imports=False, from3=False,
+                reformat=True, run=True, conservative=False):
+        """
+        Converts the code block using ``futurize`` and returns the
+        resulting code.
+
+        Passing stages=[1] or stages=[2] passes the flag ``--stage1`` or
+        ``stage2`` to ``futurize``. Passing both stages runs ``futurize``
+        with both stages by default.
+
+        If from3 is False, runs ``futurize``, converting from Python 2 to
+        both 2 and 3. If from3 is True, runs ``pasteurize`` to convert
+        from Python 3 to both 2 and 3.
+
+        Optionally reformats the code block first using the reformat() function.
+
+        If run is True, runs the resulting code under all Python
+        interpreters in self.interpreters.
+        """
+        if reformat:
+            code = reformat_code(code)
+        # Round-trip through a temp file: write, futurize in place, re-read.
+        self._write_test_script(code)
+        self._futurize_test_script(stages=stages, all_imports=all_imports,
+                                   from3=from3, conservative=conservative)
+        output = self._read_test_script()
+        if run:
+            for interpreter in self.interpreters:
+                _ = self._run_test_script(interpreter=interpreter)
+        return output
+
+    def compare(self, output, expected, ignore_imports=True):
+        """
+        Compares whether the code blocks are equal. If not, raises an
+        exception so the test fails. Ignores any trailing whitespace like
+        blank lines.
+
+        If ignore_imports is True, passes the code blocks into the
+        strip_future_imports method.
+
+        If one code block is a unicode string and the other a
+        byte-string, it assumes the byte-string is encoded as utf-8.
+        """
+        if ignore_imports:
+            output = self.strip_future_imports(output)
+            expected = self.strip_future_imports(expected)
+        if isinstance(output, bytes) and not isinstance(expected, bytes):
+            output = output.decode('utf-8')
+        if isinstance(expected, bytes) and not isinstance(output, bytes):
+            expected = expected.decode('utf-8')
+        # Normalize import-line ordering in the actual output only; the
+        # expected block is assumed to already be in canonical order.
+        self.assertEqual(order_future_lines(output.rstrip()),
+                         expected.rstrip())
+
+ def strip_future_imports(self, code):
+ """
+ Strips any of these import lines:
+
+ from __future__ import
+ from future
+ from future.
+ from builtins
+
+ or any line containing:
+ install_hooks()
+ or:
+ install_aliases()
+
+ Limitation: doesn't handle imports split across multiple lines like
+ this:
+
+ from __future__ import (absolute_import, division, print_function,
+ unicode_literals)
+ """
+ output = []
+ # We need .splitlines(keepends=True), which doesn't exist on Py2,
+ # so we use this instead:
+ for line in code.split('\n'):
+ if not (line.startswith('from __future__ import ')
+ or line.startswith('from future ')
+ or line.startswith('from builtins ')
+ or 'install_hooks()' in line
+ or 'install_aliases()' in line
+ # but don't match "from future_builtins" :)
+ or line.startswith('from future.')):
+ output.append(line)
+ return '\n'.join(output)
+
+ def convert_check(self, before, expected, stages=(1, 2), all_imports=False,
+ ignore_imports=True, from3=False, run=True,
+ conservative=False):
+ """
+ Convenience method that calls convert() and compare().
+
+ Reformats the code blocks automatically using the reformat_code()
+ function.
+
+ If all_imports is passed, we add the appropriate import headers
+ for the stage(s) selected to the ``expected`` code-block, so they
+ needn't appear repeatedly in the test code.
+
+ If ignore_imports is True, ignores the presence of any lines
+ beginning:
+
+ from __future__ import ...
+ from future import ...
+
+ for the purpose of the comparison.
+ """
+ output = self.convert(before, stages=stages, all_imports=all_imports,
+ from3=from3, run=run, conservative=conservative)
+ if all_imports:
+ headers = self.headers2 if 2 in stages else self.headers1
+ else:
+ headers = ''
+
+ self.compare(output, headers + reformat_code(expected),
+ ignore_imports=ignore_imports)
+
+ def unchanged(self, code, **kwargs):
+ """
+ Convenience method to ensure the code is unchanged by the
+ futurize process.
+ """
+ self.convert_check(code, code, **kwargs)
+
+ def _write_test_script(self, code, filename='mytestscript.py'):
+ """
+ Dedents the given code (a multiline string) and writes it out to
+ a file in a temporary folder like /tmp/tmpUDCn7x/mytestscript.py.
+ """
+ if isinstance(code, bytes):
+ code = code.decode('utf-8')
+ # Be explicit about encoding the temp file as UTF-8 (issue #63):
+ with io.open(self.tempdir + filename, 'wt', encoding='utf-8') as f:
+ f.write(dedent(code))
+
+ def _read_test_script(self, filename='mytestscript.py'):
+ with io.open(self.tempdir + filename, 'rt', encoding='utf-8') as f:
+ newsource = f.read()
+ return newsource
+
    def _futurize_test_script(self, filename='mytestscript.py', stages=(1, 2),
                              all_imports=False, from3=False,
                              conservative=False):
        """
        Runs futurize.py (or pasteurize.py if from3 is True) over the
        previously-written test script, in place (-w), as a subprocess
        under self.env.

        Raises FuturizeError or PasteurizeError (wrapping the underlying
        CalledProcessError) if the conversion script fails; the message
        includes the full command line and the script contents.
        """
        params = []
        stages = list(stages)
        if all_imports:
            params.append('--all-imports')
        if from3:
            script = 'pasteurize.py'
        else:
            script = 'futurize.py'
            # Stage flags only apply to futurize, not pasteurize:
            if stages == [1]:
                params.append('--stage1')
            elif stages == [2]:
                params.append('--stage2')
            else:
                assert stages == [1, 2]
                # No extra params needed
            if conservative:
                params.append('--conservative')

        # Absolute file path:
        fn = self.tempdir + filename
        call_args = [sys.executable, script] + params + ['-w', fn]
        try:
            # Capture stderr too, so a failure's output is preserved:
            output = check_output(call_args, stderr=STDOUT, env=self.env)
        except CalledProcessError as e:
            with open(fn) as f:
                msg = (
                    'Error running the command %s\n'
                    '%s\n'
                    'Contents of file %s:\n'
                    '\n'
                    '%s') % (
                    ' '.join(call_args),
                    'env=%s' % self.env,
                    fn,
                    '----\n%s\n----' % f.read(),
                )
            ErrorClass = (FuturizeError if 'futurize' in script else PasteurizeError)
            raise ErrorClass(msg, e.returncode, e.cmd, output=e.output)
        return output
+
    def _run_test_script(self, filename='mytestscript.py',
                         interpreter=sys.executable):
        """
        Runs the converted test script under the given interpreter as a
        subprocess (with self.env) and returns its combined
        stdout/stderr output.

        Raises VerboseCalledProcessError -- whose message includes the
        command line and the script contents -- if the script exits
        non-zero.
        """
        # Absolute file path:
        fn = self.tempdir + filename
        try:
            output = check_output([interpreter, fn],
                                  env=self.env, stderr=STDOUT)
        except CalledProcessError as e:
            with open(fn) as f:
                msg = (
                    'Error running the command %s\n'
                    '%s\n'
                    'Contents of file %s:\n'
                    '\n'
                    '%s') % (
                    ' '.join([interpreter, fn]),
                    'env=%s' % self.env,
                    fn,
                    '----\n%s\n----' % f.read(),
                )
            if not hasattr(e, 'output'):
                # The attribute CalledProcessError.output doesn't exist on Py2.6
                e.output = None
            raise VerboseCalledProcessError(msg, e.returncode, e.cmd, output=e.output)
        return output
+
+
# Decorator to skip some tests on Python 2.6 ...
# (unittest.skipIf marks the decorated test as skipped, with the reason shown.)
skip26 = unittest.skipIf(PY26, "this test is known to fail on Py2.6")
+
+
def expectedFailurePY3(func):
    """Marks *func* as an expected failure when running on Python 3."""
    return unittest.expectedFailure(func) if PY3 else func
+
def expectedFailurePY26(func):
    """Marks *func* as an expected failure when running on Python 2.6."""
    return unittest.expectedFailure(func) if PY26 else func
+
+
def expectedFailurePY27(func):
    """Marks *func* as an expected failure when running on Python 2.7."""
    return unittest.expectedFailure(func) if PY27 else func
+
+
def expectedFailurePY2(func):
    """Marks *func* as an expected failure when running on Python 2."""
    return unittest.expectedFailure(func) if PY2 else func
+
+
# Renamed in Py3.3:
# Older Pythons only provide assertRaisesRegexp; alias it under the new
# spelling so tests can use assertRaisesRegex everywhere.
if not hasattr(unittest.TestCase, 'assertRaisesRegex'):
    unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
+
+# From Py3.3:
def assertRegex(self, text, expected_regex, msg=None):
    """Fail the test unless the text matches the regular expression."""
    # NOTE(review): `unicode` must be supplied by this module's imports
    # (e.g. future.builtins) -- TODO confirm; plain Py3 has no such name.
    if isinstance(expected_regex, (str, unicode)):
        assert expected_regex, "expected_regex must not be empty."
        expected_regex = re.compile(expected_regex)
    if expected_regex.search(text):
        return
    failure_msg = '%s: %r not found in %r' % (
        msg or "Regex didn't match", expected_regex.pattern, text)
    raise self.failureException(failure_msg)
+
# Patch the Py3.2+ assertRegex onto TestCase classes that lack it:
if not hasattr(unittest.TestCase, 'assertRegex'):
    bind_method(unittest.TestCase, 'assertRegex', assertRegex)
+
+class _AssertRaisesBaseContext(object):
+
+ def __init__(self, expected, test_case, callable_obj=None,
+ expected_regex=None):
+ self.expected = expected
+ self.test_case = test_case
+ if callable_obj is not None:
+ try:
+ self.obj_name = callable_obj.__name__
+ except AttributeError:
+ self.obj_name = str(callable_obj)
+ else:
+ self.obj_name = None
+ if isinstance(expected_regex, (bytes, str)):
+ expected_regex = re.compile(expected_regex)
+ self.expected_regex = expected_regex
+ self.msg = None
+
+ def _raiseFailure(self, standardMsg):
+ msg = self.test_case._formatMessage(self.msg, standardMsg)
+ raise self.test_case.failureException(msg)
+
+ def handle(self, name, callable_obj, args, kwargs):
+ """
+ If callable_obj is None, assertRaises/Warns is being used as a
+ context manager, so check for a 'msg' kwarg and return self.
+ If callable_obj is not None, call it passing args and kwargs.
+ """
+ if callable_obj is None:
+ self.msg = kwargs.pop('msg', None)
+ return self
+ with self:
+ callable_obj(*args, **kwargs)
+
class _AssertWarnsContext(_AssertRaisesBaseContext):
    """A context manager used to implement TestCase.assertWarns* methods."""

    def __enter__(self):
        # The __warningregistry__'s need to be in a pristine state for tests
        # to work properly.
        for v in sys.modules.values():
            if getattr(v, '__warningregistry__', None):
                v.__warningregistry__ = {}
        # Record all warnings, and force the expected category to always
        # be emitted (not suppressed by "once"/module filters):
        self.warnings_manager = warnings.catch_warnings(record=True)
        self.warnings = self.warnings_manager.__enter__()
        warnings.simplefilter("always", self.expected)
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.warnings_manager.__exit__(exc_type, exc_value, tb)
        if exc_type is not None:
            # let unexpected exceptions pass through
            return
        try:
            exc_name = self.expected.__name__
        except AttributeError:
            exc_name = str(self.expected)
        first_matching = None
        # Scan the recorded warnings for one of the expected category
        # whose text also matches expected_regex (when one was given):
        for m in self.warnings:
            w = m.message
            if not isinstance(w, self.expected):
                continue
            if first_matching is None:
                first_matching = w
            if (self.expected_regex is not None and
                not self.expected_regex.search(str(w))):
                continue
            # store warning for later retrieval
            self.warning = w
            self.filename = m.filename
            self.lineno = m.lineno
            return
        # Now we simply try to choose a helpful failure message
        if first_matching is not None:
            self._raiseFailure('"{}" does not match "{}"'.format(
                self.expected_regex.pattern, str(first_matching)))
        if self.obj_name:
            self._raiseFailure("{} not triggered by {}".format(exc_name,
                self.obj_name))
        else:
            self._raiseFailure("{} not triggered".format(exc_name))
+
+
def assertWarns(self, expected_warning, callable_obj=None, *args, **kwargs):
    """
    Fails unless a warning of class ``expected_warning`` is triggered by
    ``callable_obj`` when invoked with ``args``/``kwargs``.  Warnings of
    other types are not handled here: depending on the warning filters
    in effect they may be silenced, printed, or raised.

    With callable_obj omitted (or None), returns a context object:

        with self.assertWarns(SomeWarning):
            do_something()

    An optional 'msg' keyword argument may be supplied when assertWarns
    is used as a context object.

    The context manager keeps the first matching warning as its
    'warning' attribute, and 'filename'/'lineno' describe the line of
    Python code that triggered it, so the warning can be inspected
    after the assertion:

        with self.assertWarns(SomeWarning) as cm:
            do_something()
        the_warning = cm.warning
        self.assertEqual(the_warning.some_attribute, 147)
    """
    return _AssertWarnsContext(expected_warning, self, callable_obj).handle(
        'assertWarns', callable_obj, args, kwargs)
+
# Patch assertWarns (new in Py3.2) onto TestCase classes that lack it:
if not hasattr(unittest.TestCase, 'assertWarns'):
    bind_method(unittest.TestCase, 'assertWarns', assertWarns)
diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/types/__init__.py
new file mode 100644
index 000000000..062507703
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/types/__init__.py
@@ -0,0 +1,257 @@
+"""
+This module contains backports the data types that were significantly changed
+in the transition from Python 2 to Python 3.
+
+- an implementation of Python 3's bytes object (pure Python subclass of
+ Python 2's builtin 8-bit str type)
+- an implementation of Python 3's str object (pure Python subclass of
+ Python 2's builtin unicode type)
+- a backport of the range iterator from Py3 with slicing support
+
+It is used as follows::
+
+ from __future__ import division, absolute_import, print_function
+ from builtins import bytes, dict, int, range, str
+
+to bring in the new semantics for these functions from Python 3. And
+then, for example::
+
+ b = bytes(b'ABCD')
+ assert list(b) == [65, 66, 67, 68]
+ assert repr(b) == "b'ABCD'"
+ assert [65, 66] in b
+
+ # These raise TypeErrors:
+ # b + u'EFGH'
+ # b.split(u'B')
+ # bytes(b',').join([u'Fred', u'Bill'])
+
+
+ s = str(u'ABCD')
+
+ # These raise TypeErrors:
+ # s.join([b'Fred', b'Bill'])
+ # s.startswith(b'A')
+ # b'B' in s
+ # s.find(b'A')
+ # s.replace(u'A', b'a')
+
+ # This raises an AttributeError:
+ # s.decode('utf-8')
+
+ assert repr(s) == 'ABCD' # consistent repr with Py3 (no u prefix)
+
+
+ for i in range(10**11)[:10]:
+ pass
+
+and::
+
+ class VerboseList(list):
+ def append(self, item):
+ print('Adding an item')
+ super().append(item) # new simpler super() function
+
+For more information:
+---------------------
+
+- future.types.newbytes
+- future.types.newdict
+- future.types.newint
+- future.types.newobject
+- future.types.newrange
+- future.types.newstr
+
+
+Notes
+=====
+
+range()
+-------
+``range`` is a custom class that backports the slicing behaviour from
+Python 3 (based on the ``xrange`` module by Dan Crosta). See the
+``newrange`` module docstring for more details.
+
+
+super()
+-------
+``super()`` is based on Ryan Kelly's ``magicsuper`` module. See the
+``newsuper`` module docstring for more details.
+
+
+round()
+-------
+Python 3 modifies the behaviour of ``round()`` to use "Banker's Rounding".
+See http://stackoverflow.com/a/10825998. See the ``newround`` module
+docstring for more details.
+
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import functools
+from numbers import Integral
+
+from future import utils
+
+
+# Some utility functions to enforce strict type-separation of unicode str and
+# bytes:
def disallow_types(argnums, disallowed_types):
    """
    A decorator that raises a TypeError if any of the given numbered
    arguments is of the corresponding given type (e.g. bytes or unicode
    string).

    For example:

        @disallow_types([0, 1], [unicode, bytes])
        def f(a, b):
            pass

    raises a TypeError when f is called if a unicode object is passed as
    `a` or a bytes object is passed as `b`.

    This also skips over keyword arguments, so

        @disallow_types([0, 1], [unicode, bytes])
        def g(a, b=None):
            pass

    doesn't raise an exception if g is called with only one argument a,
    e.g.:

        g(b'Byte string')

    Example use:

    >>> class newbytes(object):
    ...     @disallow_types([1], [unicode])
    ...     def __add__(self, other):
    ...          pass

    >>> newbytes('1234') + u'1234'      #doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
      ...
    TypeError: can't concat 'bytes' to (unicode) str
    """

    def decorator(function):

        @functools.wraps(function)
        def wrapper(*args, **kwargs):
            # These imports are just for this decorator, and are defined here
            # to prevent circular imports:
            from .newbytes import newbytes
            from .newint import newint
            from .newstr import newstr

            errmsg = "argument can't be {0}"
            for (argnum, mytype) in zip(argnums, disallowed_types):
                # Handle the case where the type is passed as a string like 'newbytes'.
                if isinstance(mytype, str) or isinstance(mytype, bytes):
                    # NOTE(review): the name is resolved against this
                    # wrapper's locals(), so only 'newbytes', 'newint' and
                    # 'newstr' (imported above) are meaningful as strings.
                    mytype = locals()[mytype]

                # Only restrict kw args only if they are passed:
                if len(args) <= argnum:
                    break

                # Here we use type() rather than isinstance() because
                # __instancecheck__ is being overridden. E.g.
                # isinstance(b'abc', newbytes) is True on Py2.
                if type(args[argnum]) == mytype:
                    raise TypeError(errmsg.format(mytype))

            return function(*args, **kwargs)
        return wrapper
    return decorator
+
+
def no(mytype, argnums=(1,)):
    """
    A shortcut for the disallow_types decorator that disallows a single
    type in each of the given argument positions.

    Example use:

    >>> class newstr(object):
    ...     @no('bytes')
    ...     def __add__(self, other):
    ...          pass

    >>> newstr(u'1234') + b'1234'     #doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
      ...
    TypeError: argument can't be bytes

    The object can also be passed directly, but passing the string helps
    to prevent circular import problems.
    """
    positions = (argnums,) if isinstance(argnums, Integral) else argnums
    return disallow_types(positions, [mytype] * len(positions))
+
+
def issubset(list1, list2):
    """
    Returns True if list1 occurs as a contiguous run inside list2.

    Examples:

    >>> issubset([], [65, 66, 67])
    True
    >>> issubset([65], [65, 66, 67])
    True
    >>> issubset([65, 66], [65, 66, 67])
    True
    >>> issubset([65, 67], [65, 66, 67])
    False
    """
    n = len(list1)
    return any(list2[i:i + n] == list1
               for i in range(len(list2) - n + 1))
+
+
if utils.PY3:
    # On Py3 the builtins already have the desired semantics; re-export
    # them so ``from future.types import ...`` works uniformly.
    import builtins
    bytes = builtins.bytes
    dict = builtins.dict
    int = builtins.int
    list = builtins.list
    object = builtins.object
    range = builtins.range
    str = builtins.str

    # The identity mapping
    newtypes = {bytes: bytes,
                dict: dict,
                int: int,
                list: list,
                object: object,
                range: range,
                str: str}

    __all__ = ['newtypes']

else:

    from .newbytes import newbytes
    from .newdict import newdict
    from .newint import newint
    from .newlist import newlist
    from .newrange import newrange
    from .newobject import newobject
    from .newstr import newstr

    # Maps each Py2 builtin type to its backported replacement.  Note
    # that Py2 str maps to newbytes and Py2 unicode maps to newstr.
    newtypes = {bytes: newbytes,
                dict: newdict,
                int: newint,
                long: newint,
                list: newlist,
                object: newobject,
                range: newrange,
                str: newbytes,
                unicode: newstr}

    __all__ = ['newbytes', 'newdict', 'newint', 'newlist', 'newrange', 'newstr', 'newtypes']
diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newbytes.py b/.install/.kodi/addons/script.module.future/libs/future/types/newbytes.py
new file mode 100644
index 000000000..2a337c864
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/types/newbytes.py
@@ -0,0 +1,456 @@
+"""
+Pure-Python implementation of a Python 3-like bytes object for Python 2.
+
+Why do this? Without it, the Python 2 bytes object is a very, very
+different beast to the Python 3 bytes object.
+"""
+
+from collections import Iterable
+from numbers import Integral
+import string
+import copy
+
+from future.utils import istext, isbytes, PY3, with_metaclass
+from future.types import no, issubset
+from future.types.newobject import newobject
+
+
# Keep a reference to the builtin bytes (== str on Py2) before any shadowing:
_builtin_bytes = bytes

if PY3:
    # We'll probably never use newstr on Py3 anyway...
    unicode = str
+
+
class BaseNewBytes(type):
    """
    Metaclass for newbytes: makes isinstance(x, newbytes) succeed for
    any builtin bytes instance, while subclasses of newbytes keep the
    normal subclass-based check.
    """
    def __instancecheck__(cls, instance):
        if cls == newbytes:
            return isinstance(instance, _builtin_bytes)
        return issubclass(instance.__class__, cls)
+
+
+def _newchr(x):
+ if isinstance(x, str): # this happens on pypy
+ return x.encode('ascii')
+ else:
+ return chr(x)
+
+
+class newbytes(with_metaclass(BaseNewBytes, _builtin_bytes)):
+ """
+ A backport of the Python 3 bytes object to Py2
+ """
+ def __new__(cls, *args, **kwargs):
+ """
+ From the Py3 bytes docstring:
+
+ bytes(iterable_of_ints) -> bytes
+ bytes(string, encoding[, errors]) -> bytes
+ bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer
+ bytes(int) -> bytes object of size given by the parameter initialized with null bytes
+ bytes() -> empty bytes object
+
+ Construct an immutable array of bytes from:
+ - an iterable yielding integers in range(256)
+ - a text string encoded using the specified encoding
+ - any object implementing the buffer API.
+ - an integer
+ """
+
+ encoding = None
+ errors = None
+
+ if len(args) == 0:
+ return super(newbytes, cls).__new__(cls)
+ elif len(args) >= 2:
+ args = list(args)
+ if len(args) == 3:
+ errors = args.pop()
+ encoding=args.pop()
+ # Was: elif isinstance(args[0], newbytes):
+ # We use type() instead of the above because we're redefining
+ # this to be True for all unicode string subclasses. Warning:
+ # This may render newstr un-subclassable.
+ if type(args[0]) == newbytes:
+ # Special-case: for consistency with Py3.3, we return the same object
+ # (with the same id) if a newbytes object is passed into the
+ # newbytes constructor.
+ return args[0]
+ elif isinstance(args[0], _builtin_bytes):
+ value = args[0]
+ elif isinstance(args[0], unicode):
+ try:
+ if 'encoding' in kwargs:
+ assert encoding is None
+ encoding = kwargs['encoding']
+ if 'errors' in kwargs:
+ assert errors is None
+ errors = kwargs['errors']
+ except AssertionError:
+ raise TypeError('Argument given by name and position')
+ if encoding is None:
+ raise TypeError('unicode string argument without an encoding')
+ ###
+ # Was: value = args[0].encode(**kwargs)
+ # Python 2.6 string encode() method doesn't take kwargs:
+ # Use this instead:
+ newargs = [encoding]
+ if errors is not None:
+ newargs.append(errors)
+ value = args[0].encode(*newargs)
+ ###
+ elif hasattr(args[0], '__bytes__'):
+ value = args[0].__bytes__()
+ elif isinstance(args[0], Iterable):
+ if len(args[0]) == 0:
+ # This could be an empty list or tuple. Return b'' as on Py3.
+ value = b''
+ else:
+ # Was: elif len(args[0])>0 and isinstance(args[0][0], Integral):
+ # # It's a list of integers
+ # But then we can't index into e.g. frozensets. Try to proceed
+ # anyway.
+ try:
+ value = bytearray([_newchr(x) for x in args[0]])
+ except:
+ raise ValueError('bytes must be in range(0, 256)')
+ elif isinstance(args[0], Integral):
+ if args[0] < 0:
+ raise ValueError('negative count')
+ value = b'\x00' * args[0]
+ else:
+ value = args[0]
+ if type(value) == newbytes:
+ # Above we use type(...) rather than isinstance(...) because the
+ # newbytes metaclass overrides __instancecheck__.
+ # oldbytes(value) gives the wrong thing on Py2: the same
+ # result as str(value) on Py3, e.g. "b'abc'". (Issue #193).
+ # So we handle this case separately:
+ return copy.copy(value)
+ else:
+ return super(newbytes, cls).__new__(cls, value)
+
+ def __repr__(self):
+ return 'b' + super(newbytes, self).__repr__()
+
+ def __str__(self):
+ return 'b' + "'{0}'".format(super(newbytes, self).__str__())
+
+ def __getitem__(self, y):
+ value = super(newbytes, self).__getitem__(y)
+ if isinstance(y, Integral):
+ return ord(value)
+ else:
+ return newbytes(value)
+
+ def __getslice__(self, *args):
+ return self.__getitem__(slice(*args))
+
+ def __contains__(self, key):
+ if isinstance(key, int):
+ newbyteskey = newbytes([key])
+ # Don't use isinstance() here because we only want to catch
+ # newbytes, not Python 2 str:
+ elif type(key) == newbytes:
+ newbyteskey = key
+ else:
+ newbyteskey = newbytes(key)
+ return issubset(list(newbyteskey), list(self))
+
+ @no(unicode)
+ def __add__(self, other):
+ return newbytes(super(newbytes, self).__add__(other))
+
+ @no(unicode)
+ def __radd__(self, left):
+ return newbytes(left) + self
+
+ @no(unicode)
+ def __mul__(self, other):
+ return newbytes(super(newbytes, self).__mul__(other))
+
+ @no(unicode)
+ def __rmul__(self, other):
+ return newbytes(super(newbytes, self).__rmul__(other))
+
+ def __mod__(self, vals):
+ if isinstance(vals, newbytes):
+ vals = _builtin_bytes.__str__(vals)
+
+ elif isinstance(vals, tuple):
+ newvals = []
+ for v in vals:
+ if isinstance(v, newbytes):
+ v = _builtin_bytes.__str__(v)
+ newvals.append(v)
+ vals = tuple(newvals)
+
+ elif (hasattr(vals.__class__, '__getitem__') and
+ hasattr(vals.__class__, 'iteritems')):
+ for k, v in vals.iteritems():
+ if isinstance(v, newbytes):
+ vals[k] = _builtin_bytes.__str__(v)
+
+ return _builtin_bytes.__mod__(self, vals)
+
+ def __imod__(self, other):
+ return self.__mod__(other)
+
+ def join(self, iterable_of_bytes):
+ errmsg = 'sequence item {0}: expected bytes, {1} found'
+ if isbytes(iterable_of_bytes) or istext(iterable_of_bytes):
+ raise TypeError(errmsg.format(0, type(iterable_of_bytes)))
+ for i, item in enumerate(iterable_of_bytes):
+ if istext(item):
+ raise TypeError(errmsg.format(i, type(item)))
+ return newbytes(super(newbytes, self).join(iterable_of_bytes))
+
+ @classmethod
+ def fromhex(cls, string):
+ # Only on Py2:
+ return cls(string.replace(' ', '').decode('hex'))
+
+ @no(unicode)
+ def find(self, sub, *args):
+ return super(newbytes, self).find(sub, *args)
+
+ @no(unicode)
+ def rfind(self, sub, *args):
+ return super(newbytes, self).rfind(sub, *args)
+
+ @no(unicode, (1, 2))
+ def replace(self, old, new, *args):
+ return newbytes(super(newbytes, self).replace(old, new, *args))
+
+ def encode(self, *args):
+ raise AttributeError("encode method has been disabled in newbytes")
+
+ def decode(self, encoding='utf-8', errors='strict'):
+ """
+ Returns a newstr (i.e. unicode subclass)
+
+ Decode B using the codec registered for encoding. Default encoding
+ is 'utf-8'. errors may be given to set a different error
+ handling scheme. Default is 'strict' meaning that encoding errors raise
+ a UnicodeDecodeError. Other possible values are 'ignore' and 'replace'
+ as well as any other name registered with codecs.register_error that is
+ able to handle UnicodeDecodeErrors.
+ """
+ # Py2 str.encode() takes encoding and errors as optional parameter,
+ # not keyword arguments as in Python 3 str.
+
+ from future.types.newstr import newstr
+
+ if errors == 'surrogateescape':
+ from future.utils.surrogateescape import register_surrogateescape
+ register_surrogateescape()
+
+ return newstr(super(newbytes, self).decode(encoding, errors))
+
+ # This is currently broken:
+ # # We implement surrogateescape error handling here in addition rather
+ # # than relying on the custom error handler from
+ # # future.utils.surrogateescape to be registered globally, even though
+ # # that is fine in the case of decoding. (But not encoding: see the
+ # # comments in newstr.encode()``.)
+ #
+ # if errors == 'surrogateescape':
+ # # Decode char by char
+ # mybytes = []
+ # for code in self:
+ # # Code is an int
+ # if 0x80 <= code <= 0xFF:
+ # b = 0xDC00 + code
+ # elif code <= 0x7F:
+ # b = _unichr(c).decode(encoding=encoding)
+ # else:
+ # # # It may be a bad byte
+ # # FIXME: What to do in this case? See the Py3 docs / tests.
+ # # # Try swallowing it.
+ # # continue
+ # # print("RAISE!")
+ # raise NotASurrogateError
+ # mybytes.append(b)
+ # return newbytes(mybytes)
+ # return newbytes(super(newstr, self).decode(encoding, errors))
+
+ @no(unicode)
+ def startswith(self, prefix, *args):
+ return super(newbytes, self).startswith(prefix, *args)
+
+ @no(unicode)
+ def endswith(self, prefix, *args):
+ return super(newbytes, self).endswith(prefix, *args)
+
+ @no(unicode)
+ def split(self, sep=None, maxsplit=-1):
+ # Py2 str.split() takes maxsplit as an optional parameter, not as a
+ # keyword argument as in Python 3 bytes.
+ parts = super(newbytes, self).split(sep, maxsplit)
+ return [newbytes(part) for part in parts]
+
+ def splitlines(self, keepends=False):
+ """
+ B.splitlines([keepends]) -> list of lines
+
+ Return a list of the lines in B, breaking at line boundaries.
+ Line breaks are not included in the resulting list unless keepends
+ is given and true.
+ """
+ # Py2 str.splitlines() takes keepends as an optional parameter,
+ # not as a keyword argument as in Python 3 bytes.
+ parts = super(newbytes, self).splitlines(keepends)
+ return [newbytes(part) for part in parts]
+
+ @no(unicode)
+ def rsplit(self, sep=None, maxsplit=-1):
+ # Py2 str.rsplit() takes maxsplit as an optional parameter, not as a
+ # keyword argument as in Python 3 bytes.
+ parts = super(newbytes, self).rsplit(sep, maxsplit)
+ return [newbytes(part) for part in parts]
+
+ @no(unicode)
+ def partition(self, sep):
+ parts = super(newbytes, self).partition(sep)
+ return tuple(newbytes(part) for part in parts)
+
+ @no(unicode)
+ def rpartition(self, sep):
+ parts = super(newbytes, self).rpartition(sep)
+ return tuple(newbytes(part) for part in parts)
+
+ @no(unicode, (1,))
+ def rindex(self, sub, *args):
+ '''
+ S.rindex(sub [,start [,end]]) -> int
+
+ Like S.rfind() but raise ValueError when the substring is not found.
+ '''
+ pos = self.rfind(sub, *args)
+ if pos == -1:
+ raise ValueError('substring not found')
+
+ @no(unicode)
+ def index(self, sub, *args):
+ '''
+ Returns index of sub in bytes.
+ Raises ValueError if byte is not in bytes and TypeError if can't
+ be converted bytes or its length is not 1.
+ '''
+ if isinstance(sub, int):
+ if len(args) == 0:
+ start, end = 0, len(self)
+ elif len(args) == 1:
+ start = args[0]
+ elif len(args) == 2:
+ start, end = args
+ else:
+ raise TypeError('takes at most 3 arguments')
+ return list(self)[start:end].index(sub)
+ if not isinstance(sub, bytes):
+ try:
+ sub = self.__class__(sub)
+ except (TypeError, ValueError):
+ raise TypeError("can't convert sub to bytes")
+ try:
+ return super(newbytes, self).index(sub, *args)
+ except ValueError:
+ raise ValueError('substring not found')
+
+ def __eq__(self, other):
+ if isinstance(other, (_builtin_bytes, bytearray)):
+ return super(newbytes, self).__eq__(other)
+ else:
+ return False
+
+ def __ne__(self, other):
+ if isinstance(other, _builtin_bytes):
+ return super(newbytes, self).__ne__(other)
+ else:
+ return True
+
+ unorderable_err = 'unorderable types: bytes() and {0}'
+
+ def __lt__(self, other):
+ if isinstance(other, _builtin_bytes):
+ return super(newbytes, self).__lt__(other)
+ raise TypeError(self.unorderable_err.format(type(other)))
+
+ def __le__(self, other):
+ if isinstance(other, _builtin_bytes):
+ return super(newbytes, self).__le__(other)
+ raise TypeError(self.unorderable_err.format(type(other)))
+
+ def __gt__(self, other):
+ if isinstance(other, _builtin_bytes):
+ return super(newbytes, self).__gt__(other)
+ raise TypeError(self.unorderable_err.format(type(other)))
+
+ def __ge__(self, other):
+ if isinstance(other, _builtin_bytes):
+ return super(newbytes, self).__ge__(other)
+ raise TypeError(self.unorderable_err.format(type(other)))
+
+ def __native__(self):
+ # We can't just feed a newbytes object into str(), because
+ # newbytes.__str__() returns e.g. "b'blah'", consistent with Py3 bytes.
+ return super(newbytes, self).__str__()
+
+ def __getattribute__(self, name):
+ """
+ A trick to cause the ``hasattr`` builtin-fn to return False for
+ the 'encode' method on Py2.
+ """
+ if name in ['encode', u'encode']:
+ raise AttributeError("encode method has been disabled in newbytes")
+ return super(newbytes, self).__getattribute__(name)
+
+ @no(unicode)
+ def rstrip(self, bytes_to_strip=None):
+ """
+ Strip trailing bytes contained in the argument.
+ If the argument is omitted, strip trailing ASCII whitespace.
+ """
+ return newbytes(super(newbytes, self).rstrip(bytes_to_strip))
+
+ @no(unicode)
+ def strip(self, bytes_to_strip=None):
+ """
+ Strip leading and trailing bytes contained in the argument.
+ If the argument is omitted, strip trailing ASCII whitespace.
+ """
+ return newbytes(super(newbytes, self).strip(bytes_to_strip))
+
+ def lower(self):
+ """
+ b.lower() -> copy of b
+
+ Return a copy of b with all ASCII characters converted to lowercase.
+ """
+ return newbytes(super(newbytes, self).lower())
+
+ @no(unicode)
+ def upper(self):
+ """
+ b.upper() -> copy of b
+
+ Return a copy of b with all ASCII characters converted to uppercase.
+ """
+ return newbytes(super(newbytes, self).upper())
+
+ @classmethod
+ @no(unicode)
+ def maketrans(cls, frm, to):
+ """
+ B.maketrans(frm, to) -> translation table
+
+ Return a translation table (a bytes object of length 256) suitable
+ for use in the bytes or bytearray translate method where each byte
+ in frm is mapped to the byte at the same position in to.
+ The bytes objects frm and to must be of the same length.
+ """
+ return newbytes(string.maketrans(frm, to))
+
+
+__all__ = ['newbytes']
diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newdict.py b/.install/.kodi/addons/script.module.future/libs/future/types/newdict.py
new file mode 100644
index 000000000..3f3a559dd
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/types/newdict.py
@@ -0,0 +1,111 @@
+"""
+A dict subclass for Python 2 that behaves like Python 3's dict
+
+Example use:
+
+>>> from builtins import dict
+>>> d1 = dict() # instead of {} for an empty dict
+>>> d2 = dict(key1='value1', key2='value2')
+
+The keys, values and items methods now return iterators on Python 2.x
+(with set-like behaviour on Python 2.7).
+
+>>> for d in (d1, d2):
+... assert not isinstance(d.keys(), list)
+... assert not isinstance(d.values(), list)
+... assert not isinstance(d.items(), list)
+"""
+
+import sys
+
+from future.utils import with_metaclass
+from future.types.newobject import newobject
+
+
+_builtin_dict = dict
+ver = sys.version_info[:2]
+
+
class BaseNewDict(type):
    """
    Metaclass for newdict: makes isinstance(x, newdict) succeed for any
    builtin dict instance, while subclasses of newdict keep the normal
    subclass-based check.
    """
    def __instancecheck__(cls, instance):
        if cls == newdict:
            return isinstance(instance, _builtin_dict)
        return issubclass(instance.__class__, cls)
+
+
class newdict(with_metaclass(BaseNewDict, _builtin_dict)):
    """
    A backport of the Python 3 dict object to Py2
    """

    def items(self):
        """
        On Python 2.7+:
            D.items() -> a set-like object providing a view on D's items
        On Python 2.6:
            D.items() -> an iterator over D's items
        """
        if ver == (2, 7):
            return self.viewitems()
        elif ver == (2, 6):
            return self.iteritems()
        elif ver >= (3, 0):
            # Bug fix: delegate to the builtin dict; previously this
            # called self.items(), which recursed infinitely if this
            # branch was ever reached on Py3.
            return _builtin_dict.items(self)

    def keys(self):
        """
        On Python 2.7+:
            D.keys() -> a set-like object providing a view on D's keys
        On Python 2.6:
            D.keys() -> an iterator over D's keys
        """
        if ver == (2, 7):
            return self.viewkeys()
        elif ver == (2, 6):
            return self.iterkeys()
        elif ver >= (3, 0):
            # Bug fix: see items() -- avoids infinite recursion on Py3.
            return _builtin_dict.keys(self)

    def values(self):
        """
        On Python 2.7+:
            D.values() -> a set-like object providing a view on D's values
        On Python 2.6:
            D.values() -> an iterator over D's values
        """
        if ver == (2, 7):
            return self.viewvalues()
        elif ver == (2, 6):
            return self.itervalues()
        elif ver >= (3, 0):
            # Bug fix: see items() -- avoids infinite recursion on Py3.
            return _builtin_dict.values(self)

    def __new__(cls, *args, **kwargs):
        """
        dict() -> new empty dictionary
        dict(mapping) -> new dictionary initialized from a mapping object's
            (key, value) pairs
        dict(iterable) -> new dictionary initialized as if via:
            d = {}
            for k, v in iterable:
                d[k] = v
        dict(**kwargs) -> new dictionary initialized with the name=value pairs
            in the keyword argument list.  For example:  dict(one=1, two=2)
        """
        if len(args) == 0:
            return super(newdict, cls).__new__(cls)
        # The first positional argument (mapping, iterable or newdict) is
        # handed straight through; the former branch on
        # type(args[0]) == newdict was redundant, since both paths passed
        # args[0] on unchanged.  Keyword args are handled by dict.__init__.
        return super(newdict, cls).__new__(cls, args[0])

    def __native__(self):
        """
        Hook for the future.utils.native() function
        """
        return dict(self)
+
+
+__all__ = ['newdict']
diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newint.py b/.install/.kodi/addons/script.module.future/libs/future/types/newint.py
new file mode 100644
index 000000000..705b8fa95
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/types/newint.py
@@ -0,0 +1,379 @@
+"""
+Backport of Python 3's int, based on Py2's long.
+
+They are very similar. The most notable difference is:
+
+- representation: trailing L in Python 2 removed in Python 3
+"""
+from __future__ import division
+
+import struct
+import collections
+
+from future.types.newbytes import newbytes
+from future.types.newobject import newobject
+from future.utils import PY3, isint, istext, isbytes, with_metaclass, native
+
+
+if PY3:
+ long = int
+
+
class BaseNewInt(type):
    """
    Metaclass for newint: widens isinstance(x, newint) to accept any
    Py2 short int or long, mirroring Python 3's unified int type.
    """
    def __instancecheck__(cls, instance):
        # Genuine subclasses of newint keep ordinary subclass semantics;
        # only the newint class itself gets the widened check.
        if cls != newint:
            return issubclass(instance.__class__, cls)
        return isinstance(instance, (int, long))
+
+
class newint(with_metaclass(BaseNewInt, long)):
    """
    A backport of the Python 3 int object to Py2
    """
    def __new__(cls, x=0, base=10):
        """
        From the Py3 int docstring:

        |  int(x=0) -> integer
        |  int(x, base=10) -> integer
        |
        |  Convert a number or string to an integer, or return 0 if no
        |  arguments are given.  If x is a number, return x.__int__().  For
        |  floating point numbers, this truncates towards zero.
        |
        |  If x is not a number or if base is given, then x must be a string,
        |  bytes, or bytearray instance representing an integer literal in the
        |  given base.  The literal can be preceded by '+' or '-' and be
        |  surrounded by whitespace.  The base defaults to 10.  Valid bases are
        |  0 and 2-36.  Base 0 means to interpret the base from the string as an
        |  integer literal.
        |  >>> int('0b100', base=0)
        |  4

        """
        # Prefer the object's own __int__ conversion; fall back to the raw
        # value (e.g. a str/bytes literal) when there is none.
        try:
            val = x.__int__()
        except AttributeError:
            val = x
        else:
            if not isint(val):
                raise TypeError('__int__ returned non-int ({0})'.format(
                    type(val)))

        if base != 10:
            # Explicit base: Py3 only allows this for string-like inputs.
            if not (istext(val) or isbytes(val) or isinstance(val, bytearray)):
                raise TypeError(
                    "int() can't convert non-string with explicit base")
            try:
                return super(newint, cls).__new__(cls, val, base)
            except TypeError:
                # Retry with a newbytes wrapper, which long() can parse.
                return super(newint, cls).__new__(cls, newbytes(val), base)
        # After here, base is 10
        try:
            return super(newint, cls).__new__(cls, val)
        except TypeError:
            # Py2 long doesn't handle bytearray input with an explicit base, so
            # handle this here.
            # Py3: int(bytearray(b'10'), 2) == 2
            # Py2: int(bytearray(b'10'), 2) == 2 raises TypeError
            # Py2: long(bytearray(b'10'), 2) == 2 raises TypeError
            try:
                return super(newint, cls).__new__(cls, newbytes(val))
            except:
                raise TypeError("newint argument must be a string or a number,"
                                "not '{0}'".format(type(val)))
+
    def __repr__(self):
        """
        Without the L suffix
        """
        # Py2 long reprs end in 'L' (e.g. '123L'); strip it so the repr
        # matches Python 3's int.
        value = super(newint, self).__repr__()
        assert value[-1] == 'L'
        return value[:-1]
+
+ def __add__(self, other):
+ value = super(newint, self).__add__(other)
+ if value is NotImplemented:
+ return long(self) + other
+ return newint(value)
+
+ def __radd__(self, other):
+ value = super(newint, self).__radd__(other)
+ if value is NotImplemented:
+ return other + long(self)
+ return newint(value)
+
+ def __sub__(self, other):
+ value = super(newint, self).__sub__(other)
+ if value is NotImplemented:
+ return long(self) - other
+ return newint(value)
+
+ def __rsub__(self, other):
+ value = super(newint, self).__rsub__(other)
+ if value is NotImplemented:
+ return other - long(self)
+ return newint(value)
+
+ def __mul__(self, other):
+ value = super(newint, self).__mul__(other)
+ if isint(value):
+ return newint(value)
+ elif value is NotImplemented:
+ return long(self) * other
+ return value
+
+ def __rmul__(self, other):
+ value = super(newint, self).__rmul__(other)
+ if isint(value):
+ return newint(value)
+ elif value is NotImplemented:
+ return other * long(self)
+ return value
+
+ def __div__(self, other):
+ # We override this rather than e.g. relying on object.__div__ or
+ # long.__div__ because we want to wrap the value in a newint()
+ # call if other is another int
+ value = long(self) / other
+ if isinstance(other, (int, long)):
+ return newint(value)
+ else:
+ return value
+
+ def __rdiv__(self, other):
+ value = other / long(self)
+ if isinstance(other, (int, long)):
+ return newint(value)
+ else:
+ return value
+
+ def __idiv__(self, other):
+ # long has no __idiv__ method. Use __itruediv__ and cast back to
+ # newint:
+ value = self.__itruediv__(other)
+ if isinstance(other, (int, long)):
+ return newint(value)
+ else:
+ return value
+
+ def __truediv__(self, other):
+ value = super(newint, self).__truediv__(other)
+ if value is NotImplemented:
+ value = long(self) / other
+ return value
+
+ def __rtruediv__(self, other):
+ return super(newint, self).__rtruediv__(other)
+
+ def __itruediv__(self, other):
+ # long has no __itruediv__ method
+ mylong = long(self)
+ mylong /= other
+ return mylong
+
+ def __floordiv__(self, other):
+ return newint(super(newint, self).__floordiv__(other))
+
+ def __rfloordiv__(self, other):
+ return newint(super(newint, self).__rfloordiv__(other))
+
+ def __ifloordiv__(self, other):
+ # long has no __ifloordiv__ method
+ mylong = long(self)
+ mylong //= other
+ return newint(mylong)
+
+ def __mod__(self, other):
+ value = super(newint, self).__mod__(other)
+ if value is NotImplemented:
+ return long(self) % other
+ return newint(value)
+
+ def __rmod__(self, other):
+ value = super(newint, self).__rmod__(other)
+ if value is NotImplemented:
+ return other % long(self)
+ return newint(value)
+
+ def __divmod__(self, other):
+ value = super(newint, self).__divmod__(other)
+ if value is NotImplemented:
+ mylong = long(self)
+ return (mylong // other, mylong % other)
+ return (newint(value[0]), newint(value[1]))
+
+ def __rdivmod__(self, other):
+ value = super(newint, self).__rdivmod__(other)
+ if value is NotImplemented:
+ mylong = long(self)
+ return (other // mylong, other % mylong)
+ return (newint(value[0]), newint(value[1]))
+
+ def __pow__(self, other):
+ value = super(newint, self).__pow__(other)
+ if value is NotImplemented:
+ return long(self) ** other
+ return newint(value)
+
+ def __rpow__(self, other):
+ value = super(newint, self).__rpow__(other)
+ if value is NotImplemented:
+ return other ** long(self)
+ return newint(value)
+
+ def __lshift__(self, other):
+ if not isint(other):
+ raise TypeError(
+ "unsupported operand type(s) for <<: '%s' and '%s'" %
+ (type(self).__name__, type(other).__name__))
+ return newint(super(newint, self).__lshift__(other))
+
+ def __rshift__(self, other):
+ if not isint(other):
+ raise TypeError(
+ "unsupported operand type(s) for >>: '%s' and '%s'" %
+ (type(self).__name__, type(other).__name__))
+ return newint(super(newint, self).__rshift__(other))
+
+ def __and__(self, other):
+ if not isint(other):
+ raise TypeError(
+ "unsupported operand type(s) for &: '%s' and '%s'" %
+ (type(self).__name__, type(other).__name__))
+ return newint(super(newint, self).__and__(other))
+
+ def __or__(self, other):
+ if not isint(other):
+ raise TypeError(
+ "unsupported operand type(s) for |: '%s' and '%s'" %
+ (type(self).__name__, type(other).__name__))
+ return newint(super(newint, self).__or__(other))
+
+ def __xor__(self, other):
+ if not isint(other):
+ raise TypeError(
+ "unsupported operand type(s) for ^: '%s' and '%s'" %
+ (type(self).__name__, type(other).__name__))
+ return newint(super(newint, self).__xor__(other))
+
+ def __neg__(self):
+ return newint(super(newint, self).__neg__())
+
+ def __pos__(self):
+ return newint(super(newint, self).__pos__())
+
+ def __abs__(self):
+ return newint(super(newint, self).__abs__())
+
+ def __invert__(self):
+ return newint(super(newint, self).__invert__())
+
+ def __int__(self):
+ return self
+
+ def __nonzero__(self):
+ return self.__bool__()
+
+ def __bool__(self):
+ """
+ So subclasses can override this, Py3-style
+ """
+ return super(newint, self).__nonzero__()
+
+ def __native__(self):
+ return long(self)
+
+ def to_bytes(self, length, byteorder='big', signed=False):
+ """
+ Return an array of bytes representing an integer.
+
+ The integer is represented using length bytes. An OverflowError is
+ raised if the integer is not representable with the given number of
+ bytes.
+
+ The byteorder argument determines the byte order used to represent the
+ integer. If byteorder is 'big', the most significant byte is at the
+ beginning of the byte array. If byteorder is 'little', the most
+ significant byte is at the end of the byte array. To request the native
+ byte order of the host system, use `sys.byteorder' as the byte order value.
+
+ The signed keyword-only argument determines whether two's complement is
+ used to represent the integer. If signed is False and a negative integer
+ is given, an OverflowError is raised.
+ """
+ if length < 0:
+ raise ValueError("length argument must be non-negative")
+ if length == 0 and self == 0:
+ return newbytes()
+ if signed and self < 0:
+ bits = length * 8
+ num = (2**bits) + self
+ if num <= 0:
+ raise OverflowError("int too smal to convert")
+ else:
+ if self < 0:
+ raise OverflowError("can't convert negative int to unsigned")
+ num = self
+ if byteorder not in ('little', 'big'):
+ raise ValueError("byteorder must be either 'little' or 'big'")
+ h = b'%x' % num
+ s = newbytes((b'0'*(len(h) % 2) + h).zfill(length*2).decode('hex'))
+ if signed:
+ high_set = s[0] & 0x80
+ if self > 0 and high_set:
+ raise OverflowError("int too big to convert")
+ if self < 0 and not high_set:
+ raise OverflowError("int too small to convert")
+ if len(s) > length:
+ raise OverflowError("int too big to convert")
+ return s if byteorder == 'big' else s[::-1]
+
    @classmethod
    def from_bytes(cls, mybytes, byteorder='big', signed=False):
        """
        Return the integer represented by the given array of bytes.

        The mybytes argument must either support the buffer protocol or be an
        iterable object producing bytes.  Bytes and bytearray are examples of
        built-in objects that support the buffer protocol.

        The byteorder argument determines the byte order used to represent the
        integer.  If byteorder is 'big', the most significant byte is at the
        beginning of the byte array.  If byteorder is 'little', the most
        significant byte is at the end of the byte array.  To request the native
        byte order of the host system, use `sys.byteorder' as the byte order value.

        The signed keyword-only argument indicates whether two's complement is
        used to represent the integer.
        """
        if byteorder not in ('little', 'big'):
            raise ValueError("byteorder must be either 'little' or 'big'")
        # NOTE(review): `unicode` is the Py2 builtin name (NameError on Py3);
        # this module presumably only runs on Py2 -- confirm.
        if isinstance(mybytes, unicode):
            raise TypeError("cannot convert unicode objects to bytes")
        # mybytes can also be passed as a sequence of integers on Py3.
        # Test for this:
        elif isinstance(mybytes, collections.Iterable):
            mybytes = newbytes(mybytes)
        # Normalize to big-endian before parsing the hex digits.
        b = mybytes if byteorder == 'big' else mybytes[::-1]
        if len(b) == 0:
            b = b'\x00'
        # The encode() method has been disabled by newbytes, but Py2's
        # str has it:
        num = int(native(b).encode('hex'), 16)
        if signed and (b[0] & 0x80):
            # High bit set: interpret as a two's-complement negative value.
            num = num - (2 ** (len(b)*8))
        return cls(num)
+
+
+# def _twos_comp(val, bits):
+# """compute the 2's compliment of int value val"""
+# if( (val&(1<<(bits-1))) != 0 ):
+# val = val - (1<>> from builtins import list
+>>> l1 = list() # instead of {} for an empty list
+>>> l1.append('hello')
+>>> l2 = l1.copy()
+
+"""
+
+import sys
+import copy
+
+from future.utils import with_metaclass
+from future.types.newobject import newobject
+
+
+_builtin_list = list
+ver = sys.version_info[:2]
+
+
+class BaseNewList(type):
+ def __instancecheck__(cls, instance):
+ if cls == newlist:
+ return isinstance(instance, _builtin_list)
+ else:
+ return issubclass(instance.__class__, cls)
+
+
class newlist(with_metaclass(BaseNewList, _builtin_list)):
    """
    A backport of the Python 3 list object to Py2, adding the Py3-only
    methods ``copy()`` and ``clear()``.
    """
    def copy(self):
        """
        L.copy() -> list -- a shallow copy of L
        """
        return copy.copy(self)

    def clear(self):
        """L.clear() -> None -- remove all items from L"""
        for i in range(len(self)):
            self.pop()

    def __new__(cls, *args, **kwargs):
        """
        list() -> new empty list
        list(iterable) -> new list initialized from iterable's items
        """
        if len(args) == 0:
            return super(newlist, cls).__new__(cls)
        # The original branched on type(args[0]) == newlist but both
        # branches assigned the same value; the dead duplicate branch is
        # collapsed here.
        value = args[0]
        return super(newlist, cls).__new__(cls, value)

    def __add__(self, value):
        # Wrap so that concatenation stays a newlist.
        return newlist(super(newlist, self).__add__(value))

    def __radd__(self, left):
        " left + self "
        # NOTE(review): the bare except deliberately converts any failure
        # to coerce `left` into NotImplemented, letting Python try other
        # reflected operations.
        try:
            return newlist(left) + self
        except:
            return NotImplemented

    def __getitem__(self, y):
        """
        x.__getitem__(y) <==> x[y]

        Warning: a bug in Python 2.x prevents indexing via a slice from
        returning a newlist object.
        """
        if isinstance(y, slice):
            return newlist(super(newlist, self).__getitem__(y))
        else:
            return super(newlist, self).__getitem__(y)

    def __native__(self):
        """
        Hook for the future.utils.native() function
        """
        return list(self)

    def __nonzero__(self):
        # Py2 truthiness hook: non-empty lists are truthy.
        return len(self) > 0
+
+
+__all__ = ['newlist']
diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newmemoryview.py b/.install/.kodi/addons/script.module.future/libs/future/types/newmemoryview.py
new file mode 100644
index 000000000..72c6990a7
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/types/newmemoryview.py
@@ -0,0 +1,27 @@
+"""
+A pretty lame implementation of a memoryview object for Python 2.6.
+"""
+
+from collections import Iterable
+from numbers import Integral
+import string
+
+from future.utils import istext, isbytes, PY3, with_metaclass
+from future.types import no, issubset
+
+
+# class BaseNewBytes(type):
+# def __instancecheck__(cls, instance):
+# return isinstance(instance, _builtin_bytes)
+
+
class newmemoryview(object):  # with_metaclass(BaseNewBytes, _builtin_bytes)):
    """
    A pretty lame backport of the Python 2.7 and Python 3.x
    memoryview object to Py2.6.

    Stores the wrapped buffer object as ``self.obj``.  The original
    implementation did ``return obj`` from ``__init__``, which raises
    TypeError on every instantiation (``__init__`` must return None).
    """
    def __init__(self, obj):
        # Keep a reference to the underlying object instead of returning it.
        self.obj = obj
+
+
+__all__ = ['newmemoryview']
diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newobject.py b/.install/.kodi/addons/script.module.future/libs/future/types/newobject.py
new file mode 100644
index 000000000..776d47664
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/types/newobject.py
@@ -0,0 +1,116 @@
+"""
+An object subclass for Python 2 that gives new-style classes written in the
+style of Python 3 (with ``__next__`` and unicode-returning ``__str__`` methods)
+the appropriate Python 2-style ``next`` and ``__unicode__`` methods for compatible.
+
+Example use::
+
+ from builtins import object
+
+ my_unicode_str = u'Unicode string: \u5b54\u5b50'
+
+ class A(object):
+ def __str__(self):
+ return my_unicode_str
+
+ a = A()
+ print(str(a))
+
+ # On Python 2, these relations hold:
+ assert unicode(a) == my_unicode_string
+ assert str(a) == my_unicode_string.encode('utf-8')
+
+
+Another example::
+
+ from builtins import object
+
+ class Upper(object):
+ def __init__(self, iterable):
+ self._iter = iter(iterable)
+ def __next__(self): # note the Py3 interface
+ return next(self._iter).upper()
+ def __iter__(self):
+ return self
+
+ assert list(Upper('hello')) == list('HELLO')
+
+"""
+
+
class newobject(object):
    """
    A magical object class that provides Python 2 compatibility methods::
        next
        __unicode__
        __nonzero__

    Subclasses of this class can merely define the Python 3 methods (__next__,
    __str__, and __bool__).
    """
    def next(self):
        # Py2 iteration protocol: delegate to the Py3-style __next__ if the
        # subclass defines one.
        if hasattr(self, '__next__'):
            return type(self).__next__(self)
        raise TypeError('newobject is not an iterator')

    def __unicode__(self):
        # All subclasses of the builtin object should have __str__ defined.
        # Note that old-style classes do not have __str__ defined.
        if hasattr(self, '__str__'):
            s = type(self).__str__(self)
        else:
            s = str(self)
        # NOTE(review): `unicode` is the Py2 builtin (NameError on Py3);
        # presumably this hook is only invoked on Py2 -- confirm.
        if isinstance(s, unicode):
            return s
        else:
            return s.decode('utf-8')

    def __nonzero__(self):
        # Py2 truthiness hook: prefer the Py3-style __bool__, then __len__,
        # then default to True like the base object.
        if hasattr(self, '__bool__'):
            return type(self).__bool__(self)
        if hasattr(self, '__len__'):
            return type(self).__len__(self)
        # object has no __nonzero__ method
        return True

    # Are these ever needed?
    # def __div__(self):
    #     return self.__truediv__()

    # def __idiv__(self, other):
    #     return self.__itruediv__(other)

    def __long__(self):
        # Py2 long() conversion hook: reuse __int__ when available.
        if not hasattr(self, '__int__'):
            return NotImplemented
        return self.__int__()  # not type(self).__int__(self)

    # def __new__(cls, *args, **kwargs):
    #     """
    #     dict() -> new empty dictionary
    #     dict(mapping) -> new dictionary initialized from a mapping object's
    #         (key, value) pairs
    #     dict(iterable) -> new dictionary initialized as if via:
    #         d = {}
    #         for k, v in iterable:
    #             d[k] = v
    #     dict(**kwargs) -> new dictionary initialized with the name=value pairs
    #         in the keyword argument list.  For example:  dict(one=1, two=2)
    #     """

    #     if len(args) == 0:
    #         return super(newdict, cls).__new__(cls)
    #     elif type(args[0]) == newdict:
    #         return args[0]
    #     else:
    #         value = args[0]
    #     return super(newdict, cls).__new__(cls, value)

    def __native__(self):
        """
        Hook for the future.utils.native() function
        """
        # NOTE(review): object(self) looks wrong -- object() takes no
        # arguments on Py3 (TypeError); presumably only exercised on Py2,
        # where extra args are tolerated -- confirm.
        return object(self)
+
+
+__all__ = ['newobject']
diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newopen.py b/.install/.kodi/addons/script.module.future/libs/future/types/newopen.py
new file mode 100644
index 000000000..b75d45afb
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/types/newopen.py
@@ -0,0 +1,32 @@
+"""
+A substitute for the Python 3 open() function.
+
+Note that io.open() is more complete but maybe slower. Even so, the
+completeness may be a better default. TODO: compare these
+"""
+
+_builtin_open = open
+
class newopen(object):
    """Wrapper providing key part of Python 3 open() interface.

    From IPython's py3compat.py module.  License: BSD.

    Encodes text on write and decodes bytes on read with the supplied
    encoding, and supports use as a context manager.
    """
    def __init__(self, fname, mode="r", encoding="utf-8"):
        self.f = _builtin_open(fname, mode)
        self.enc = encoding

    def write(self, s):
        """Encode `s` with this file's encoding and write the bytes."""
        encoded = s.encode(self.enc)
        return self.f.write(encoded)

    def read(self, size=-1):
        """Read up to `size` bytes and decode them to text."""
        raw = self.f.read(size)
        return raw.decode(self.enc)

    def close(self):
        """Close the underlying file object."""
        return self.f.close()

    def __enter__(self):
        return self

    def __exit__(self, etype, value, traceback):
        self.f.close()
diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newrange.py b/.install/.kodi/addons/script.module.future/libs/future/types/newrange.py
new file mode 100644
index 000000000..9173b0509
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/types/newrange.py
@@ -0,0 +1,165 @@
+"""
+Nearly identical to xrange.py, by Dan Crosta, from
+
+ https://github.com/dcrosta/xrange.git
+
+This is included here in the ``future`` package rather than pointed to as
+a dependency because there is no package for ``xrange`` on PyPI. It is
+also tweaked to appear like a regular Python 3 ``range`` object rather
+than a Python 2 xrange.
+
+From Dan Crosta's README:
+
+ "A pure-Python implementation of Python 2.7's xrange built-in, with
+ some features backported from the Python 3.x range built-in (which
+ replaced xrange) in that version."
+
+ Read more at
+ https://late.am/post/2012/06/18/what-the-heck-is-an-xrange
+"""
+from __future__ import absolute_import
+
+from collections import Sequence, Iterator
+from itertools import islice
+
+from future.backports.misc import count # with step parameter on Py2.6
+# For backward compatibility with python-future versions < 0.14.4:
+_count = count
+
+
+class newrange(Sequence):
+ """
+ Pure-Python backport of Python 3's range object. See `the CPython
+ documentation for details:
+ `_
+ """
+
    def __init__(self, *args):
        """
        Accept 1-3 integer arguments, as Python 3's range() does:
        range(stop), range(start, stop) or range(start, stop, step).
        """
        if len(args) == 1:
            start, stop, step = 0, args[0], 1
        elif len(args) == 2:
            start, stop, step = args[0], args[1], 1
        elif len(args) == 3:
            start, stop, step = args
        else:
            raise TypeError('range() requires 1-3 int arguments')

        try:
            start, stop, step = int(start), int(stop), int(step)
        except ValueError:
            raise TypeError('an integer is required')

        if step == 0:
            raise ValueError('range() arg 3 must not be zero')
        elif step < 0:
            # Clamp stop so (stop - start) has the same sign as step;
            # an empty range then gets length 0 from the formula below.
            stop = min(stop, start)
        else:
            stop = max(stop, start)

        self._start = start
        self._stop = stop
        self._step = step
        # Ceiling division: the number of elements in the progression.
        self._len = (stop - start) // step + bool((stop - start) % step)
+
+ @property
+ def start(self):
+ return self._start
+
+ @property
+ def stop(self):
+ return self._stop
+
+ @property
+ def step(self):
+ return self._step
+
+ def __repr__(self):
+ if self._step == 1:
+ return 'range(%d, %d)' % (self._start, self._stop)
+ return 'range(%d, %d, %d)' % (self._start, self._stop, self._step)
+
+ def __eq__(self, other):
+ return (isinstance(other, newrange) and
+ (self._len == 0 == other._len or
+ (self._start, self._step, self._len) ==
+ (other._start, other._step, self._len)))
+
+ def __len__(self):
+ return self._len
+
+ def index(self, value):
+ """Return the 0-based position of integer `value` in
+ the sequence this range represents."""
+ try:
+ diff = value - self._start
+ except TypeError:
+ raise ValueError('%r is not in range' % value)
+ quotient, remainder = divmod(diff, self._step)
+ if remainder == 0 and 0 <= quotient < self._len:
+ return abs(quotient)
+ raise ValueError('%r is not in range' % value)
+
+ def count(self, value):
+ """Return the number of ocurrences of integer `value`
+ in the sequence this range represents."""
+ # a value can occur exactly zero or one times
+ return int(value in self)
+
+ def __contains__(self, value):
+ """Return ``True`` if the integer `value` occurs in
+ the sequence this range represents."""
+ try:
+ self.index(value)
+ return True
+ except ValueError:
+ return False
+
+ def __reversed__(self):
+ return iter(self[::-1])
+
+ def __getitem__(self, index):
+ """Return the element at position ``index`` in the sequence
+ this range represents, or raise :class:`IndexError` if the
+ position is out of range."""
+ if isinstance(index, slice):
+ return self.__getitem_slice(index)
+ if index < 0:
+ # negative indexes access from the end
+ index = self._len + index
+ if index < 0 or index >= self._len:
+ raise IndexError('range object index out of range')
+ return self._start + index * self._step
+
+ def __getitem_slice(self, slce):
+ """Return a range which represents the requested slce
+ of the sequence represented by this range.
+ """
+ scaled_indices = (self._step * n for n in slce.indices(self._len))
+ start_offset, stop_offset, new_step = scaled_indices
+ return newrange(self._start + start_offset,
+ self._start + stop_offset,
+ new_step)
+
+ def __iter__(self):
+ """Return an iterator which enumerates the elements of the
+ sequence this range represents."""
+ return range_iterator(self)
+
+
class range_iterator(Iterator):
    """An iterator for a :class:`range`.

    Walks the arithmetic progression defined by the range's start and
    step, stopping after len(range_) items.
    """
    def __init__(self, range_):
        self._it = islice(count(range_.start, range_.step), len(range_))

    def __iter__(self):
        return self

    def __next__(self):
        return next(self._it)

    # Py2 iterator protocol alias for the Py3-style __next__.
    def next(self):
        return next(self._it)
+
+
+__all__ = ['newrange']
diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newstr.py b/.install/.kodi/addons/script.module.future/libs/future/types/newstr.py
new file mode 100644
index 000000000..e6272fb90
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/types/newstr.py
@@ -0,0 +1,416 @@
+"""
+This module redefines ``str`` on Python 2.x to be a subclass of the Py2
+``unicode`` type that behaves like the Python 3.x ``str``.
+
+The main differences between ``newstr`` and Python 2.x's ``unicode`` type are
+the stricter type-checking and absence of a `u''` prefix in the representation.
+
+It is designed to be used together with the ``unicode_literals`` import
+as follows:
+
+ >>> from __future__ import unicode_literals
+ >>> from builtins import str, isinstance
+
+On Python 3.x and normally on Python 2.x, these expressions hold
+
+ >>> str('blah') is 'blah'
+ True
+ >>> isinstance('blah', str)
+ True
+
+However, on Python 2.x, with this import:
+
+ >>> from __future__ import unicode_literals
+
+the same expressions are False:
+
+ >>> str('blah') is 'blah'
+ False
+ >>> isinstance('blah', str)
+ False
+
+This module is designed to be imported together with ``unicode_literals`` on
+Python 2 to bring the meaning of ``str`` back into alignment with unprefixed
+string literals (i.e. ``unicode`` subclasses).
+
+Note that ``str()`` (and ``print()``) would then normally call the
+``__unicode__`` method on objects in Python 2. To define string
+representations of your objects portably across Py3 and Py2, use the
+:func:`python_2_unicode_compatible` decorator in :mod:`future.utils`.
+
+"""
+
+from collections import Iterable
+from numbers import Number
+
+from future.utils import PY3, istext, with_metaclass, isnewbytes
+from future.types import no, issubset
+from future.types.newobject import newobject
+
+
+if PY3:
+ # We'll probably never use newstr on Py3 anyway...
+ unicode = str
+
+
+class BaseNewStr(type):
+ def __instancecheck__(cls, instance):
+ if cls == newstr:
+ return isinstance(instance, unicode)
+ else:
+ return issubclass(instance.__class__, cls)
+
+
+class newstr(with_metaclass(BaseNewStr, unicode)):
+ """
+ A backport of the Python 3 str object to Py2
+ """
+ no_convert_msg = "Can't convert '{0}' object to str implicitly"
+
+ def __new__(cls, *args, **kwargs):
+ """
+ From the Py3 str docstring:
+
+ str(object='') -> str
+ str(bytes_or_buffer[, encoding[, errors]]) -> str
+
+ Create a new string object from the given object. If encoding or
+ errors is specified, then the object must expose a data buffer
+ that will be decoded using the given encoding and error handler.
+ Otherwise, returns the result of object.__str__() (if defined)
+ or repr(object).
+ encoding defaults to sys.getdefaultencoding().
+ errors defaults to 'strict'.
+
+ """
+ if len(args) == 0:
+ return super(newstr, cls).__new__(cls)
+ # Special case: If someone requests str(str(u'abc')), return the same
+ # object (same id) for consistency with Py3.3. This is not true for
+ # other objects like list or dict.
+ elif type(args[0]) == newstr and cls == newstr:
+ return args[0]
+ elif isinstance(args[0], unicode):
+ value = args[0]
+ elif isinstance(args[0], bytes): # i.e. Py2 bytes or newbytes
+ if 'encoding' in kwargs or len(args) > 1:
+ value = args[0].decode(*args[1:], **kwargs)
+ else:
+ value = args[0].__str__()
+ else:
+ value = args[0]
+ return super(newstr, cls).__new__(cls, value)
+
    def __repr__(self):
        """
        Without the u prefix
        """
        # Py2 unicode reprs start with 'u' (e.g. u'abc'); drop the first
        # character so the repr matches Python 3's str.
        value = super(newstr, self).__repr__()
        # assert value[0] == u'u'
        return value[1:]
+
+ def __getitem__(self, y):
+ """
+ Warning: Python <= 2.7.6 has a bug that causes this method never to be called
+ when y is a slice object. Therefore the type of newstr()[:2] is wrong
+ (unicode instead of newstr).
+ """
+ return newstr(super(newstr, self).__getitem__(y))
+
+ def __contains__(self, key):
+ errmsg = "'in ' requires string as left operand, not {0}"
+ # Don't use isinstance() here because we only want to catch
+ # newstr, not Python 2 unicode:
+ if type(key) == newstr:
+ newkey = key
+ elif isinstance(key, unicode) or isinstance(key, bytes) and not isnewbytes(key):
+ newkey = newstr(key)
+ else:
+ raise TypeError(errmsg.format(type(key)))
+ return issubset(list(newkey), list(self))
+
+ @no('newbytes')
+ def __add__(self, other):
+ return newstr(super(newstr, self).__add__(other))
+
+ @no('newbytes')
+ def __radd__(self, left):
+ " left + self "
+ try:
+ return newstr(left) + self
+ except:
+ return NotImplemented
+
+ def __mul__(self, other):
+ return newstr(super(newstr, self).__mul__(other))
+
+ def __rmul__(self, other):
+ return newstr(super(newstr, self).__rmul__(other))
+
+ def join(self, iterable):
+ errmsg = 'sequence item {0}: expected unicode string, found bytes'
+ for i, item in enumerate(iterable):
+ # Here we use type() rather than isinstance() because
+ # __instancecheck__ is being overridden. E.g.
+ # isinstance(b'abc', newbytes) is True on Py2.
+ if isnewbytes(item):
+ raise TypeError(errmsg.format(i))
+ # Support use as a staticmethod: str.join('-', ['a', 'b'])
+ if type(self) == newstr:
+ return newstr(super(newstr, self).join(iterable))
+ else:
+ return newstr(super(newstr, newstr(self)).join(iterable))
+
+ @no('newbytes')
+ def find(self, sub, *args):
+ return super(newstr, self).find(sub, *args)
+
+ @no('newbytes')
+ def rfind(self, sub, *args):
+ return super(newstr, self).rfind(sub, *args)
+
+ @no('newbytes', (1, 2))
+ def replace(self, old, new, *args):
+ return newstr(super(newstr, self).replace(old, new, *args))
+
+ def decode(self, *args):
+ raise AttributeError("decode method has been disabled in newstr")
+
+ def encode(self, encoding='utf-8', errors='strict'):
+ """
+ Returns bytes
+
+ Encode S using the codec registered for encoding. Default encoding
+ is 'utf-8'. errors may be given to set a different error
+ handling scheme. Default is 'strict' meaning that encoding errors raise
+ a UnicodeEncodeError. Other possible values are 'ignore', 'replace' and
+ 'xmlcharrefreplace' as well as any other name registered with
+ codecs.register_error that can handle UnicodeEncodeErrors.
+ """
+ from future.types.newbytes import newbytes
+ # Py2 unicode.encode() takes encoding and errors as optional parameter,
+ # not keyword arguments as in Python 3 str.
+
+ # For the surrogateescape error handling mechanism, the
+ # codecs.register_error() function seems to be inadequate for an
+ # implementation of it when encoding. (Decoding seems fine, however.)
+ # For example, in the case of
+ # u'\udcc3'.encode('ascii', 'surrogateescape_handler')
+ # after registering the ``surrogateescape_handler`` function in
+ # future.utils.surrogateescape, both Python 2.x and 3.x raise an
+ # exception anyway after the function is called because the unicode
+ # string it has to return isn't encodable strictly as ASCII.
+
+ if errors == 'surrogateescape':
+ if encoding == 'utf-16':
+ # Known to fail here. See test_encoding_works_normally()
+ raise NotImplementedError('FIXME: surrogateescape handling is '
+ 'not yet implemented properly')
+ # Encode char by char, building up list of byte-strings
+ mybytes = []
+ for c in self:
+ code = ord(c)
+ if 0xD800 <= code <= 0xDCFF:
+ mybytes.append(newbytes([code - 0xDC00]))
+ else:
+ mybytes.append(c.encode(encoding=encoding))
+ return newbytes(b'').join(mybytes)
+ return newbytes(super(newstr, self).encode(encoding, errors))
+
+ @no('newbytes', 1)
+ def startswith(self, prefix, *args):
+ if isinstance(prefix, Iterable):
+ for thing in prefix:
+ if isnewbytes(thing):
+ raise TypeError(self.no_convert_msg.format(type(thing)))
+ return super(newstr, self).startswith(prefix, *args)
+
+ @no('newbytes', 1)
+ def endswith(self, prefix, *args):
+ # Note we need the decorator above as well as the isnewbytes()
+ # check because prefix can be either a bytes object or e.g. a
+ # tuple of possible prefixes. (If it's a bytes object, each item
+ # in it is an int.)
+ if isinstance(prefix, Iterable):
+ for thing in prefix:
+ if isnewbytes(thing):
+ raise TypeError(self.no_convert_msg.format(type(thing)))
+ return super(newstr, self).endswith(prefix, *args)
+
+ @no('newbytes', 1)
+ def split(self, sep=None, maxsplit=-1):
+ # Py2 unicode.split() takes maxsplit as an optional parameter,
+ # not as a keyword argument as in Python 3 str.
+ parts = super(newstr, self).split(sep, maxsplit)
+ return [newstr(part) for part in parts]
+
+ @no('newbytes', 1)
+ def rsplit(self, sep=None, maxsplit=-1):
+ # Py2 unicode.rsplit() takes maxsplit as an optional parameter,
+ # not as a keyword argument as in Python 3 str.
+ parts = super(newstr, self).rsplit(sep, maxsplit)
+ return [newstr(part) for part in parts]
+
+ @no('newbytes', 1)
+ def partition(self, sep):
+ parts = super(newstr, self).partition(sep)
+ return tuple(newstr(part) for part in parts)
+
+ @no('newbytes', 1)
+ def rpartition(self, sep):
+ parts = super(newstr, self).rpartition(sep)
+ return tuple(newstr(part) for part in parts)
+
+ @no('newbytes', 1)
+ def index(self, sub, *args):
+ """
+ Like newstr.find() but raise ValueError when the substring is not
+ found.
+ """
+ pos = self.find(sub, *args)
+ if pos == -1:
+ raise ValueError('substring not found')
+ return pos
+
+ def splitlines(self, keepends=False):
+ """
+ S.splitlines(keepends=False) -> list of strings
+
+ Return a list of the lines in S, breaking at line boundaries.
+ Line breaks are not included in the resulting list unless keepends
+ is given and true.
+ """
+ # Py2 unicode.splitlines() takes keepends as an optional parameter,
+ # not as a keyword argument as in Python 3 str.
+ parts = super(newstr, self).splitlines(keepends)
+ return [newstr(part) for part in parts]
+
    def __eq__(self, other):
        # Comparable with unicode and with *native* Py2 str (which coerces),
        # but never equal to a newbytes instance.  Note the operator
        # precedence: `A or (B and C)`.
        if (isinstance(other, unicode) or
            isinstance(other, bytes) and not isnewbytes(other)):
            return super(newstr, self).__eq__(other)
        else:
            return False
+
    def __ne__(self, other):
        # Mirror of __eq__: a newbytes instance is always unequal.
        if (isinstance(other, unicode) or
            isinstance(other, bytes) and not isnewbytes(other)):
            return super(newstr, self).__ne__(other)
        else:
            return True
+
    # Error message shared by the four ordering operators below, emulating
    # Py3's refusal to order str against unrelated types.
    unorderable_err = 'unorderable types: str() and {0}'

    def __lt__(self, other):
        # Orderable against unicode and native Py2 str, but not newbytes
        # (precedence: `A or (B and C)`).
        if (isinstance(other, unicode) or
            isinstance(other, bytes) and not isnewbytes(other)):
            return super(newstr, self).__lt__(other)
        raise TypeError(self.unorderable_err.format(type(other)))

    def __le__(self, other):
        if (isinstance(other, unicode) or
            isinstance(other, bytes) and not isnewbytes(other)):
            return super(newstr, self).__le__(other)
        raise TypeError(self.unorderable_err.format(type(other)))

    def __gt__(self, other):
        if (isinstance(other, unicode) or
            isinstance(other, bytes) and not isnewbytes(other)):
            return super(newstr, self).__gt__(other)
        raise TypeError(self.unorderable_err.format(type(other)))

    def __ge__(self, other):
        if (isinstance(other, unicode) or
            isinstance(other, bytes) and not isnewbytes(other)):
            return super(newstr, self).__ge__(other)
        raise TypeError(self.unorderable_err.format(type(other)))
+
    def __getattribute__(self, name):
        """
        A trick to cause the ``hasattr`` builtin-fn to return False for
        the 'decode' method on Py2.
        """
        # Both the native-str and unicode spellings of the attribute name are
        # blocked, since either may be used for lookup on Py2.
        if name in ['decode', u'decode']:
            raise AttributeError("decode method has been disabled in newstr")
        return super(newstr, self).__getattribute__(name)
+
    def __native__(self):
        """
        A hook for the future.utils.native() function.
        """
        # Downcast to the platform-native text type (Py2 unicode).
        return unicode(self)
+
+ @staticmethod
+ def maketrans(x, y=None, z=None):
+ """
+ Return a translation table usable for str.translate().
+
+ If there is only one argument, it must be a dictionary mapping Unicode
+ ordinals (integers) or characters to Unicode ordinals, strings or None.
+ Character keys will be then converted to ordinals.
+ If there are two arguments, they must be strings of equal length, and
+ in the resulting dictionary, each character in x will be mapped to the
+ character at the same position in y. If there is a third argument, it
+ must be a string, whose characters will be mapped to None in the result.
+ """
+
+ if y is None:
+ assert z is None
+ if not isinstance(x, dict):
+ raise TypeError('if you give only one argument to maketrans it must be a dict')
+ result = {}
+ for (key, value) in x.items():
+ if len(key) > 1:
+ raise ValueError('keys in translate table must be strings or integers')
+ result[ord(key)] = value
+ else:
+ if not isinstance(x, unicode) and isinstance(y, unicode):
+ raise TypeError('x and y must be unicode strings')
+ if not len(x) == len(y):
+ raise ValueError('the first two maketrans arguments must have equal length')
+ result = {}
+ for (xi, yi) in zip(x, y):
+ if len(xi) > 1:
+ raise ValueError('keys in translate table must be strings or integers')
+ result[ord(xi)] = ord(yi)
+
+ if z is not None:
+ for char in z:
+ result[ord(char)] = None
+ return result
+
    def translate(self, table):
        """
        S.translate(table) -> str

        Return a copy of the string S, where all characters have been mapped
        through the given translation table, which must be a mapping of
        Unicode ordinals to Unicode ordinals, strings, or None.
        Unmapped characters are left untouched. Characters mapped to None
        are deleted.
        """
        l = []
        for c in self:
            if ord(c) in table:
                val = table[ord(c)]
                if val is None:
                    # Mapped to None: drop the character entirely.
                    continue
                elif isinstance(val, unicode):
                    l.append(val)
                else:
                    # NOTE(review): uses the builtin chr() on the ordinal; on
                    # Py2 this would fail for values > 255 (unichr would be
                    # needed) -- confirm intended input range.
                    l.append(chr(val))
            else:
                l.append(c)
        return ''.join(l)
+
    def isprintable(self):
        # Py3 str API not yet supported by this backport.
        raise NotImplementedError('fixme')
+
    def isidentifier(self):
        # Py3 str API not yet supported here; future.utils.isidentifier()
        # offers a free-function equivalent.
        raise NotImplementedError('fixme')
+
+ def format_map(self):
+ raise NotImplementedError('fixme')
+
+
+__all__ = ['newstr']
diff --git a/.install/.kodi/addons/script.module.future/libs/future/utils/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/utils/__init__.py
new file mode 100644
index 000000000..906f1e464
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/utils/__init__.py
@@ -0,0 +1,741 @@
+"""
+A selection of cross-compatible functions for Python 2 and 3.
+
+This module exports useful functions for 2/3 compatible code:
+
+ * bind_method: binds functions to classes
+ * ``native_str_to_bytes`` and ``bytes_to_native_str``
+ * ``native_str``: always equal to the native platform string object (because
+ this may be shadowed by imports from future.builtins)
+ * lists: lrange(), lmap(), lzip(), lfilter()
+ * iterable method compatibility:
+ - iteritems, iterkeys, itervalues
+ - viewitems, viewkeys, viewvalues
+
+ These use the original method if available, otherwise they use items,
+ keys, values.
+
+ * types:
+
+ * text_type: unicode in Python 2, str in Python 3
+ * binary_type: str in Python 2, bytes in Python 3
+ * string_types: basestring in Python 2, str in Python 3
+
+ * bchr(c):
+ Take an integer and make a 1-character byte string
+ * bord(c)
+ Take the result of indexing on a byte string and make an integer
+ * tobytes(s)
+ Take a text string, a byte string, or a sequence of characters taken
+ from a byte string, and make a byte string.
+
+ * raise_from()
+ * raise_with_traceback()
+
+This module also defines these decorators:
+
+ * ``python_2_unicode_compatible``
+ * ``with_metaclass``
+ * ``implements_iterator``
+
+Some of the functions in this module come from the following sources:
+
+ * Jinja2 (BSD licensed: see
+ https://github.com/mitsuhiko/jinja2/blob/master/LICENSE)
+ * Pandas compatibility module pandas.compat
+ * six.py by Benjamin Peterson
+ * Django
+"""
+
+import types
+import sys
+import numbers
+import functools
+import copy
+import inspect
+
+
# Interpreter-version flags, computed once at import time.
PY3 = sys.version_info[0] == 3
PY35_PLUS = sys.version_info[0:2] >= (3, 5)
PY36_PLUS = sys.version_info[0:2] >= (3, 6)
PY2 = sys.version_info[0] == 2
PY26 = sys.version_info[0:2] == (2, 6)
PY27 = sys.version_info[0:2] == (2, 7)
# PyPy exposes this attribute on sys; CPython does not.
PYPY = hasattr(sys, 'pypy_translation_info')
+
+
def python_2_unicode_compatible(cls):
    """
    A decorator that defines __unicode__ and __str__ methods under Python
    2. Under Python 3, this decorator is a no-op.

    To support Python 2 and 3 with a single code base, define a __str__
    method returning unicode text and apply this decorator to the class, like
    this::

    >>> from future.utils import python_2_unicode_compatible

    >>> @python_2_unicode_compatible
    ... class MyClass(object):
    ...     def __str__(self):
    ...         return u'Unicode string: \u5b54\u5b50'

    >>> a = MyClass()

    Then, after this import:

    >>> from future.builtins import str

    the following is ``True`` on both Python 3 and 2::

    >>> str(a) == a.encode('utf-8').decode('utf-8')
    True

    and, on a Unicode-enabled terminal with the right fonts, these both print the
    Chinese characters for Confucius::

    >>> print(a)
    >>> print(str(a))

    The implementation comes from django.utils.encoding.
    """
    # Mutates cls in place: on Py2 the user's (text-returning) __str__
    # becomes __unicode__, and a replacement __str__ returns UTF-8 bytes.
    if not PY3:
        cls.__unicode__ = cls.__str__
        cls.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return cls
+
+
def with_metaclass(meta, *bases):
    """
    Function from jinja2/_compat.py. License: BSD.

    Use it like this::

        class BaseForm(object):
            pass

        class FormType(type):
            pass

        class Form(with_metaclass(FormType, BaseForm)):
            pass

    This requires a bit of explanation: the basic idea is to make a
    dummy metaclass for one level of class instantiation that replaces
    itself with the actual metaclass. Because of internal type checks
    we also need to make sure that we downgrade the custom metaclass
    for one level to something closer to type (that's why __call__ and
    __init__ comes back from type etc.).

    This has the advantage over six.with_metaclass of not introducing
    dummy classes into the final MRO.
    """
    class metaclass(meta):
        __call__ = type.__call__
        __init__ = type.__init__
        def __new__(cls, name, this_bases, d):
            # this_bases is None only for the temporary_class created below;
            # real subclass creation re-dispatches to the actual metaclass.
            if this_bases is None:
                return type.__new__(cls, name, (), d)
            return meta(name, bases, d)
    return metaclass('temporary_class', None, {})
+
+
# Definitions from pandas.compat and six.py follow:
if PY3:
    def bchr(s):
        # int -> length-1 bytes object, e.g. bchr(65) == b'A'
        return bytes([s])
    def bstr(s):
        # Text is encoded latin-1; anything else goes through bytes().
        if isinstance(s, str):
            return bytes(s, 'latin-1')
        else:
            return bytes(s)
    def bord(s):
        # Indexing bytes on Py3 already yields an int.
        return s

    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes

else:
    # Python 2
    def bchr(s):
        return chr(s)
    def bstr(s):
        return str(s)
    def bord(s):
        # Indexing a Py2 str yields a 1-char str; convert to its ordinal.
        return ord(s)

    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str
+
+###
+
if PY3:
    def tobytes(s):
        if isinstance(s, bytes):
            return s
        else:
            if isinstance(s, str):
                return s.encode('latin-1')
            else:
                # e.g. a list/iterable of 1-char strings or ints.
                return bytes(s)
else:
    # Python 2
    def tobytes(s):
        if isinstance(s, unicode):
            return s.encode('latin-1')
        else:
            # Native str or a sequence of characters from a byte string.
            return ''.join(s)

tobytes.__doc__ = """
    Encodes to latin-1 (where the first 256 chars are the same as
    ASCII.)
    """
+
if PY3:
    def native_str_to_bytes(s, encoding='utf-8'):
        return s.encode(encoding)

    def bytes_to_native_str(b, encoding='utf-8'):
        return b.decode(encoding)

    def text_to_native_str(t, encoding=None):
        # On Py3 the native string type is already text.
        return t
else:
    # Python 2
    def native_str_to_bytes(s, encoding=None):
        from future.types import newbytes    # to avoid a circular import
        return newbytes(s)

    def bytes_to_native_str(b, encoding=None):
        # The encoding argument is ignored; see native() below.
        return native(b)

    def text_to_native_str(t, encoding='ascii'):
        """
        Use this to create a Py2 native string when "from __future__ import
        unicode_literals" is in effect.
        """
        return unicode(t).encode(encoding)

native_str_to_bytes.__doc__ = """
    On Py3, returns an encoded string.
    On Py2, returns a newbytes type, ignoring the ``encoding`` argument.
    """
+
if PY3:
    # list-producing versions of the major Python iterating functions,
    # matching Py2's eager semantics.
    def lrange(*args, **kwargs):
        return list(range(*args, **kwargs))

    def lzip(*args, **kwargs):
        return list(zip(*args, **kwargs))

    def lmap(*args, **kwargs):
        return list(map(*args, **kwargs))

    def lfilter(*args, **kwargs):
        return list(filter(*args, **kwargs))
else:
    import __builtin__
    # Python 2-builtin ranges produce lists
    lrange = __builtin__.range
    lzip = __builtin__.zip
    lmap = __builtin__.map
    lfilter = __builtin__.filter
+
+
def isidentifier(s, dotted=False):
    '''
    A function equivalent to the str.isidentifier method on Py3
    '''
    if dotted:
        # Every dot-separated component must itself be an identifier.
        return all(isidentifier(part) for part in s.split('.'))
    if PY3:
        return s.isidentifier()
    # Py2 identifiers are ASCII-only; re caches the compiled pattern.
    import re
    return bool(re.match(r"[a-zA-Z_][a-zA-Z0-9_]*$", s))
+
+
def viewitems(obj, **kwargs):
    """
    Function for iterating over dictionary items with the same set-like
    behaviour on Py2.7 as on Py3.

    Passes kwargs to the underlying method.
    """
    # Prefer the Py2.7 view method; fall back to items() (Py3 or
    # view-less mappings).
    method = getattr(obj, "viewitems", None) or obj.items
    return method(**kwargs)
+
+
def viewkeys(obj, **kwargs):
    """
    Function for iterating over dictionary keys with the same set-like
    behaviour on Py2.7 as on Py3.

    Passes kwargs to the underlying method.
    """
    # Prefer the Py2.7 view method; fall back to keys().
    method = getattr(obj, "viewkeys", None) or obj.keys
    return method(**kwargs)
+
+
def viewvalues(obj, **kwargs):
    """
    Function for iterating over dictionary values with the same set-like
    behaviour on Py2.7 as on Py3.

    Passes kwargs to the underlying method.
    """
    # Prefer the Py2.7 view method; fall back to values().
    method = getattr(obj, "viewvalues", None) or obj.values
    return method(**kwargs)
+
+
def iteritems(obj, **kwargs):
    """Use this only if compatibility with Python versions before 2.7 is
    required. Otherwise, prefer viewitems().
    """
    # Py2's lazy iterator method when present, else plain items().
    method = getattr(obj, "iteritems", None) or obj.items
    return method(**kwargs)
+
+
def iterkeys(obj, **kwargs):
    """Use this only if compatibility with Python versions before 2.7 is
    required. Otherwise, prefer viewkeys().
    """
    # Py2's lazy iterator method when present, else plain keys().
    method = getattr(obj, "iterkeys", None) or obj.keys
    return method(**kwargs)
+
+
def itervalues(obj, **kwargs):
    """Use this only if compatibility with Python versions before 2.7 is
    required. Otherwise, prefer viewvalues().
    """
    # Py2's lazy iterator method when present, else plain values().
    method = getattr(obj, "itervalues", None) or obj.values
    return method(**kwargs)
+
+
def bind_method(cls, name, func):
    """Bind a method to class, python 2 and python 3 compatible.

    Parameters
    ----------

    cls : type
        class to receive bound method
    name : basestring
        name of method on class instance
    func : function
        function to be bound as method

    Returns
    -------
    None
    """
    # only python 2 has an issue with bound/unbound methods: a plain
    # function must be wrapped in an unbound MethodType to behave as a
    # method there; on Py3 plain functions are descriptors already.
    if not PY3:
        setattr(cls, name, types.MethodType(func, None, cls))
    else:
        setattr(cls, name, func)
+
+
def getexception():
    """Return the exception instance currently being handled, or None."""
    _, value, _ = sys.exc_info()
    return value
+
+
def _get_caller_globals_and_locals():
    """
    Returns the globals and locals of the calling frame.

    Is there an alternative to frame hacking here?
    """
    # stack()[2] is the caller of the function that called us (raise_from
    # below uses this to exec in its caller's namespace).
    caller_frame = inspect.stack()[2]
    myglobals = caller_frame[0].f_globals
    mylocals = caller_frame[0].f_locals
    return myglobals, mylocals
+
+
+def _repr_strip(mystring):
+ """
+ Returns the string without any initial or final quotes.
+ """
+ r = repr(mystring)
+ if r.startswith("'") and r.endswith("'"):
+ return r[1:-1]
+ else:
+ return r
+
+
if PY3:
    def raise_from(exc, cause):
        """
        Equivalent to:

            raise EXCEPTION from CAUSE

        on Python 3. (See PEP 3134).
        """
        myglobals, mylocals = _get_caller_globals_and_locals()

        # We pass the exception and cause along with other globals
        # when we exec():
        myglobals = myglobals.copy()
        myglobals['__python_future_raise_from_exc'] = exc
        myglobals['__python_future_raise_from_cause'] = cause
        execstr = "raise __python_future_raise_from_exc from __python_future_raise_from_cause"
        exec(execstr, myglobals, mylocals)

    def raise_(tp, value=None, tb=None):
        """
        A function that matches the Python 2.x ``raise`` statement. This
        allows re-raising exceptions with the cls value and traceback on
        Python 2 and 3.
        """
        if value is not None and isinstance(tp, Exception):
            raise TypeError("instance exception may not have a separate value")
        if value is not None:
            exc = tp(value)
        else:
            exc = tp
        # Only attach the traceback when it differs, mirroring the Py2
        # three-argument raise.
        if exc.__traceback__ is not tb:
            raise exc.with_traceback(tb)
        raise exc

    def raise_with_traceback(exc, traceback=Ellipsis):
        # Ellipsis is the "not passed" sentinel (None is a valid traceback
        # argument), in which case the current traceback is used.
        if traceback == Ellipsis:
            _, _, traceback = sys.exc_info()
        raise exc.with_traceback(traceback)

else:
    def raise_from(exc, cause):
        """
        Equivalent to:

            raise EXCEPTION from CAUSE

        on Python 3. (See PEP 3134).
        """
        # Is either arg an exception class (e.g. IndexError) rather than
        # instance (e.g. IndexError('my message here')? If so, pass the
        # name of the class undisturbed through to "raise ... from ...".
        if isinstance(exc, type) and issubclass(exc, Exception):
            e = exc()
            # exc = exc.__name__
            # execstr = "e = " + _repr_strip(exc) + "()"
            # myglobals, mylocals = _get_caller_globals_and_locals()
            # exec(execstr, myglobals, mylocals)
        else:
            e = exc
        e.__suppress_context__ = False
        if isinstance(cause, type) and issubclass(cause, Exception):
            e.__cause__ = cause()
            e.__suppress_context__ = True
        elif cause is None:
            e.__cause__ = None
            e.__suppress_context__ = True
        elif isinstance(cause, BaseException):
            e.__cause__ = cause
            e.__suppress_context__ = True
        else:
            raise TypeError("exception causes must derive from BaseException")
        e.__context__ = sys.exc_info()[1]
        raise e

    # The Py2-only three-argument raise syntax would be a SyntaxError on
    # Py3, so it is hidden inside an exec'd string.
    exec('''
def raise_(tp, value=None, tb=None):
    raise tp, value, tb

def raise_with_traceback(exc, traceback=Ellipsis):
    if traceback == Ellipsis:
        _, _, traceback = sys.exc_info()
    raise exc, None, traceback
'''.strip())
+
+
# Shared docstring for whichever raise_with_traceback implementation was
# selected by the version branch above.
raise_with_traceback.__doc__ = (
"""Raise exception with existing traceback.
If traceback is not passed, uses sys.exc_info() to get traceback."""
)


# Deprecated alias for backward compatibility with ``future`` versions < 0.11:
reraise = raise_
+
+
def implements_iterator(cls):
    '''
    From jinja2/_compat.py. License: BSD.

    Use as a decorator like this::

        @implements_iterator
        class UppercasingIterator(object):
            def __init__(self, iterable):
                self._iter = iter(iterable)
            def __iter__(self):
                return self
            def __next__(self):
                return next(self._iter).upper()

    '''
    if PY3:
        return cls
    else:
        # Py2 iterators use the ``next`` method name; rename the Py3-style
        # __next__ so the class works with Py2's next() builtin.
        cls.next = cls.__next__
        del cls.__next__
        return cls
+
# Return the bound "advance" method of an iterator: ``__next__`` on Py3,
# ``next`` on Py2 (e.g. after @implements_iterator has renamed it).
# BUG FIX: the two branches were swapped, so on Py3 get_next() looked up
# ``.next`` (which standard iterators do not have) and raised
# AttributeError, and vice versa on Py2.
if PY3:
    get_next = lambda x: x.__next__
else:
    get_next = lambda x: x.next
+
+
def encode_filename(filename):
    # On Py3 filenames are passed through unchanged; on Py2 unicode
    # filenames are encoded to UTF-8 byte strings.
    # NOTE(review): assumes a UTF-8 filesystem encoding on Py2 -- confirm.
    if PY3:
        return filename
    else:
        if isinstance(filename, unicode):
            return filename.encode('utf-8')
        return filename
+
+
+def is_new_style(cls):
+ """
+ Python 2.7 has both new-style and old-style classes. Old-style classes can
+ be pesky in some circumstances, such as when using inheritance. Use this
+ function to test for whether a class is new-style. (Python 3 only has
+ new-style classes.)
+ """
+ return hasattr(cls, '__class__') and ('__dict__' in dir(cls)
+ or hasattr(cls, '__slots__'))
+
# The native platform string and bytes types. Useful because ``str`` and
# ``bytes`` are redefined on Py2 by ``from future.builtins import *``.
native_str = str        # Py2: 8-bit str; Py3: text str
native_bytes = bytes    # Py2: alias of str; Py3: bytes
+
+
def istext(obj):
    """
    Deprecated. Use::
        >>> isinstance(obj, str)
    after this import:
        >>> from future.builtins import str
    """
    # type(u'') is the platform text type even if ``str`` is shadowed.
    text_type_here = type(u'')
    return isinstance(obj, text_type_here)
+
+
def isbytes(obj):
    """
    Deprecated. Use::
        >>> isinstance(obj, bytes)
    after this import:
        >>> from future.builtins import bytes
    """
    # type(b'') is the platform bytes type even if ``bytes`` is shadowed.
    bytes_type_here = type(b'')
    return isinstance(obj, bytes_type_here)
+
+
def isnewbytes(obj):
    """
    Equivalent to the result of ``isinstance(obj, newbytes)`` were
    ``__instancecheck__`` not overridden on the newbytes subclass. In
    other words, it is REALLY a newbytes instance, not a Py2 native str
    object?
    """
    # TODO: generalize this so that it works with subclasses of newbytes
    # Import is here to avoid circular imports:
    from future.types.newbytes import newbytes
    # Exact type comparison on purpose: isinstance() would be fooled by the
    # overridden __instancecheck__ described in the docstring.
    return type(obj) == newbytes
+
+
def isint(obj):
    """
    Deprecated. Tests whether an object is a Py3 ``int`` or either a Py2
    ``int`` or ``long``.

    Prefer ``isinstance(obj, int)`` after ``from future.builtins import
    int``, or the equivalent ``isinstance(obj, numbers.Integral)``.
    """
    return isinstance(obj, numbers.Integral)
+
+
def native(obj):
    """
    On Py3, this is a no-op: native(obj) -> obj

    On Py2, returns the corresponding native Py2 type that is a
    superclass of a backported object from Py3 (``future.builtins``
    str/bytes/int etc.), by invoking the object's ``__native__`` hook.
    Objects without the hook -- including all existing native types --
    are returned unchanged, e.g.::

        >>> type(native(u'ABC'))
        unicode
    """
    # Sentinel distinguishes "no attribute" from an attribute that is None.
    _missing = object()
    converter = getattr(obj, '__native__', _missing)
    if converter is _missing:
        return obj
    return converter()
+
+
# Implementation of exec_ is from ``six``:
if PY3:
    import builtins
    exec_ = getattr(builtins, "exec")
else:
    def exec_(code, globs=None, locs=None):
        """Execute code in a namespace."""
        if globs is None:
            # Default to the caller's namespaces, like the exec statement.
            frame = sys._getframe(1)
            globs = frame.f_globals
            if locs is None:
                locs = frame.f_locals
            del frame
        elif locs is None:
            locs = globs
        # The Py2-only statement form is hidden inside a string so that this
        # file still parses on Py3.
        exec("""exec code in globs, locs""")
+
+
# Defined here for backward compatibility:
def old_div(a, b):
    """
    DEPRECATED: import ``old_div`` from ``past.utils`` instead.

    Equivalent to ``a / b`` on Python 2 without ``from __future__ import
    division``.

    TODO: generalize this to other objects (like arrays etc.)
    """
    both_integral = (isinstance(a, numbers.Integral)
                     and isinstance(b, numbers.Integral))
    return a // b if both_integral else a / b
+
+
def as_native_str(encoding='utf-8'):
    '''
    A decorator to turn a function or method call that returns text, i.e.
    unicode, into one that returns a native platform str.

    Use it as a decorator like this::

        from __future__ import unicode_literals

        class MyClass(object):
            @as_native_str(encoding='ascii')
            def __repr__(self):
                return next(self._iter).upper()
    '''
    if PY3:
        # Py3's native str is already text; nothing to do.
        return lambda f: f
    else:
        def encoder(f):
            @functools.wraps(f)
            def wrapper(*args, **kwargs):
                # Encode the unicode result to a Py2 native (byte) str.
                return f(*args, **kwargs).encode(encoding=encoding)
            return wrapper
        return encoder
+
# listvalues and listitems definitions from Nick Coghlan's (withdrawn)
# PEP 496:
if hasattr(dict, 'iteritems'):
    # Python 2: values()/items() already return fresh lists.
    def listvalues(d):
        return d.values()
    def listitems(d):
        return d.items()
else:
    # Python 3: materialize the lazy views into lists.
    def listvalues(d):
        return list(d.values())
    def listitems(d):
        return list(d.items())
+
if PY3:
    def ensure_new_type(obj):
        # Py3's builtins already are the "new" types.
        return obj
else:
    def ensure_new_type(obj):
        from future.types.newbytes import newbytes
        from future.types.newstr import newstr
        from future.types.newint import newint
        from future.types.newdict import newdict

        native_type = type(native(obj))

        # Upcast only if the type is already a native (non-future) type
        if issubclass(native_type, type(obj)):
            # Upcast
            if native_type == str:  # i.e. Py2 8-bit str
                return newbytes(obj)
            elif native_type == unicode:
                return newstr(obj)
            elif native_type == int:
                return newint(obj)
            elif native_type == long:
                return newint(obj)
            elif native_type == dict:
                return newdict(obj)
            else:
                return obj
        else:
            # Already a new type
            # NOTE(review): this assert only admits newbytes/newstr here,
            # although newint/newdict are handled above -- confirm intended.
            assert type(obj) in [newbytes, newstr]
            return obj
+
+
# Public API of future.utils.  BUG FIX: several names defined above and
# documented in the module docstring were missing from the export list
# (raise_from, bchr, the type aliases text_type/binary_type/string_types/
# integer_types/class_types, and the PY27/PY35_PLUS/PY36_PLUS flags);
# adding them is backward-compatible.
__all__ = ['PY2', 'PY26', 'PY27', 'PY3', 'PY35_PLUS', 'PY36_PLUS', 'PYPY',
           'as_native_str', 'bchr', 'binary_type', 'bind_method', 'bord',
           'bstr', 'bytes_to_native_str', 'class_types', 'encode_filename',
           'ensure_new_type', 'exec_', 'get_next', 'getexception',
           'implements_iterator', 'integer_types', 'is_new_style',
           'isbytes', 'isidentifier', 'isint', 'isnewbytes', 'istext',
           'iteritems', 'iterkeys', 'itervalues', 'lfilter', 'listitems',
           'listvalues', 'lmap', 'lrange', 'lzip', 'native', 'native_bytes',
           'native_str', 'native_str_to_bytes', 'old_div',
           'python_2_unicode_compatible', 'raise_', 'raise_from',
           'raise_with_traceback', 'reraise', 'string_types',
           'text_to_native_str', 'text_type', 'tobytes', 'viewitems',
           'viewkeys', 'viewvalues', 'with_metaclass'
           ]
diff --git a/.install/.kodi/addons/script.module.future/libs/future/utils/surrogateescape.py b/.install/.kodi/addons/script.module.future/libs/future/utils/surrogateescape.py
new file mode 100644
index 000000000..0dcc9fa6e
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/future/utils/surrogateescape.py
@@ -0,0 +1,198 @@
+"""
+This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error
+handler of Python 3.
+
+Source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/misc
+"""
+
+# This code is released under the Python license and the BSD 2-clause license
+
+import codecs
+import sys
+
+from future import utils
+
+
+FS_ERRORS = 'surrogateescape'
+
+# # -- Python 2/3 compatibility -------------------------------------
+# FS_ERRORS = 'my_surrogateescape'
+
def u(text):
    # On Py3 string literals are already text; on Py2, decode the escape
    # sequences (e.g. \udcff) into a unicode string.
    if utils.PY3:
        return text
    return text.decode('unicode_escape')
+
def b(data):
    # On Py3, encode the literal to bytes via latin-1 (identity for the
    # first 256 code points); on Py2 a str literal already is bytes.
    if utils.PY3:
        return data.encode('latin1')
    return data
+
if utils.PY3:
    # chr() covers the whole Unicode range on Py3; bytes_chr builds a
    # single-byte bytes object from an int.
    _unichr = chr
    bytes_chr = lambda code: bytes((code,))
else:
    # Py2: unichr() for text code points, chr() for single bytes.
    _unichr = unichr
    bytes_chr = chr
+
def surrogateescape_handler(exc):
    """
    Pure Python implementation of the PEP 383: the "surrogateescape" error
    handler of Python 3. Undecodable bytes will be replaced by a Unicode
    character U+DCxx on decoding, and these are translated into the
    original bytes on encoding.
    """
    # The slice of the input that triggered the Unicode error.
    mystring = exc.object[exc.start:exc.end]

    try:
        if isinstance(exc, UnicodeDecodeError):
            # mystring is a byte-string in this case
            decoded = replace_surrogate_decode(mystring)
        elif isinstance(exc, UnicodeEncodeError):
            # In the case of u'\udcc3'.encode('ascii',
            # 'this_surrogateescape_handler'), both Python 2.x and 3.x raise an
            # exception anyway after this function is called, even though I think
            # it's doing what it should. It seems that the strict encoder is called
            # to encode the unicode string that this function returns ...
            decoded = replace_surrogate_encode(mystring)
        else:
            raise exc
    except NotASurrogateError:
        # Not a surrogateescape situation: re-raise the original error.
        raise exc
    # codecs error handlers return (replacement, position-to-resume-at).
    return (decoded, exc.end)
+
+
class NotASurrogateError(Exception):
    """Internal signal: the character is outside the surrogate range, so
    the original Unicode error should propagate instead."""
    pass
+
+
def replace_surrogate_encode(mystring):
    """
    Returns a (unicode) string, not the more logical bytes, because the codecs
    register_error functionality expects this.
    """
    decoded = []
    for ch in mystring:
        code = ord(ch)

        # The following magic comes from Py3.3's Python/codecs.c file:
        if not 0xD800 <= code <= 0xDCFF:
            # Not a surrogate. Fail with the original exception.
            raise NotASurrogateError
        # BUG FIX (cleanup): the original had two byte-identical branches
        # (0xDC00-0xDC7F and the remainder up to 0xDCFF) plus an unreachable
        # ``else`` clause; collapsed into a single append.
        # NOTE(review): codes 0xD800-0xDBFF still produce a negative ordinal
        # and therefore a ValueError from _unichr, exactly as before.
        decoded.append(_unichr(code - 0xDC00))
    return str().join(decoded)
+
+
def replace_surrogate_decode(mybytes):
    """
    Returns a (unicode) string
    """
    decoded = []
    for ch in mybytes:
        # We may be parsing newbytes (in which case ch is an int) or a native
        # str on Py2
        if isinstance(ch, int):
            code = ch
        else:
            code = ord(ch)
        if 0x80 <= code <= 0xFF:
            # Non-ASCII byte: smuggle it out as a low surrogate U+DCxx.
            decoded.append(_unichr(0xDC00 + code))
        elif code <= 0x7F:
            # Plain ASCII byte: decode it as itself.
            decoded.append(_unichr(code))
        else:
            # # It may be a bad byte
            # # Try swallowing it.
            # continue
            # print("RAISE!")
            raise NotASurrogateError
    return str().join(decoded)
+
+
def encodefilename(fn):
    """Encode a (unicode) filename using FS_ENCODING with surrogateescape
    semantics, translating U+DC80-U+DCFF back to the original bytes."""
    if FS_ENCODING == 'ascii':
        # ASCII encoder of Python 2 expects that the error handler returns a
        # Unicode string encodable to ASCII, whereas our surrogateescape error
        # handler has to return bytes in 0x80-0xFF range.
        encoded = []
        for index, ch in enumerate(fn):
            code = ord(ch)
            if code < 128:
                ch = bytes_chr(code)
            elif 0xDC80 <= code <= 0xDCFF:
                # Escaped byte: recover the original 0x80-0xFF value.
                ch = bytes_chr(code - 0xDC00)
            else:
                raise UnicodeEncodeError(FS_ENCODING,
                    fn, index, index+1,
                    'ordinal not in range(128)')
            encoded.append(ch)
        return bytes().join(encoded)
    elif FS_ENCODING == 'utf-8':
        # UTF-8 encoder of Python 2 encodes surrogates, so U+DC80-U+DCFF
        # doesn't go through our error handler
        encoded = []
        for index, ch in enumerate(fn):
            code = ord(ch)
            if 0xD800 <= code <= 0xDFFF:
                if 0xDC80 <= code <= 0xDCFF:
                    ch = bytes_chr(code - 0xDC00)
                    encoded.append(ch)
                else:
                    # Any other surrogate is illegal in UTF-8 output.
                    raise UnicodeEncodeError(
                        FS_ENCODING,
                        fn, index, index+1, 'surrogates not allowed')
            else:
                ch_utf8 = ch.encode('utf-8')
                encoded.append(ch_utf8)
        return bytes().join(encoded)
    else:
        # Any other codec: rely on the registered surrogateescape handler.
        return fn.encode(FS_ENCODING, FS_ERRORS)
+
def decodefilename(fn):
    # Decode filename bytes with surrogateescape, the inverse of
    # encodefilename() above.
    return fn.decode(FS_ENCODING, FS_ERRORS)
+
# NOTE(review): fn/encoded are self-test fixtures left over from the
# original script; they are only used by the commented-out tests at the
# bottom of this file.  Only FS_ENCODING is used by the functions above.
FS_ENCODING = 'ascii'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]')
# FS_ENCODING = 'cp932'; fn = b('[abc\x81\x00]'); encoded = u('[abc\udc81\x00]')
# FS_ENCODING = 'UTF-8'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]')


# normalize the filesystem encoding name.
# For example, we expect "utf-8", not "UTF8".
FS_ENCODING = codecs.lookup(FS_ENCODING).name
+
+
def register_surrogateescape():
    """
    Registers the surrogateescape error handler on Python 2 (only)
    """
    # Py3 ships the real handler natively.
    if utils.PY3:
        return
    try:
        # Idempotent: only register when not already present.
        codecs.lookup_error(FS_ERRORS)
    except LookupError:
        codecs.register_error(FS_ERRORS, surrogateescape_handler)
+
+
+if __name__ == '__main__':
+ pass
+ # # Tests:
+ # register_surrogateescape()
+
+ # b = decodefilename(fn)
+ # assert b == encoded, "%r != %r" % (b, encoded)
+ # c = encodefilename(b)
+ # assert c == fn, '%r != %r' % (c, fn)
+ # # print("ok")
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/__init__.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/__init__.py
new file mode 100644
index 000000000..4cb1cbcd6
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/__init__.py
@@ -0,0 +1 @@
+# empty to make this a package
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixer_util.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixer_util.py
new file mode 100644
index 000000000..48e4689db
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixer_util.py
@@ -0,0 +1,520 @@
+"""
+Utility functions from 2to3, 3to2 and python-modernize (and some home-grown
+ones).
+
+Licences:
+2to3: PSF License v2
+3to2: Apache Software License (from 3to2/setup.py)
+python-modernize licence: BSD (from python-modernize/LICENSE)
+"""
+
+from lib2to3.fixer_util import (FromImport, Newline, is_import,
+ find_root, does_tree_import, Comma)
+from lib2to3.pytree import Leaf, Node
+from lib2to3.pygram import python_symbols as syms, python_grammar
+from lib2to3.pygram import token
+from lib2to3.fixer_util import (Node, Call, Name, syms, Comma, Number)
+import re
+
+
+def canonical_fix_name(fix, avail_fixes):
+ """
+ Examples:
+ >>> canonical_fix_name('fix_wrap_text_literals')
+ 'libfuturize.fixes.fix_wrap_text_literals'
+ >>> canonical_fix_name('wrap_text_literals')
+ 'libfuturize.fixes.fix_wrap_text_literals'
+ >>> canonical_fix_name('wrap_te')
+ ValueError("unknown fixer name")
+ >>> canonical_fix_name('wrap')
+ ValueError("ambiguous fixer name")
+ """
+ if ".fix_" in fix:
+ return fix
+ else:
+ if fix.startswith('fix_'):
+ fix = fix[4:]
+ # Infer the full module name for the fixer.
+ # First ensure that no names clash (e.g.
+ # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah):
+ found = [f for f in avail_fixes
+ if f.endswith('fix_{0}'.format(fix))]
+ if len(found) > 1:
+ raise ValueError("Ambiguous fixer name. Choose a fully qualified "
+ "module name instead from these:\n" +
+ "\n".join(" " + myf for myf in found))
+ elif len(found) == 0:
+ raise ValueError("Unknown fixer. Use --list-fixes or -l for a list.")
+ return found[0]
+
+
+
+## These functions are from 3to2 by Joe Amenta:
+
+def Star(prefix=None):
+ return Leaf(token.STAR, u'*', prefix=prefix)
+
+def DoubleStar(prefix=None):
+ return Leaf(token.DOUBLESTAR, u'**', prefix=prefix)
+
+def Minus(prefix=None):
+ return Leaf(token.MINUS, u'-', prefix=prefix)
+
+def commatize(leafs):
+ """
+ Accepts/turns: (Name, Name, ..., Name, Name)
+ Returns/into: (Name, Comma, Name, Comma, ..., Name, Comma, Name)
+ """
+ new_leafs = []
+ for leaf in leafs:
+ new_leafs.append(leaf)
+ new_leafs.append(Comma())
+ del new_leafs[-1]
+ return new_leafs
+
+def indentation(node):
+ """
+ Returns the indentation for this node
+ Iff a node is in a suite, then it has indentation.
+ """
+ while node.parent is not None and node.parent.type != syms.suite:
+ node = node.parent
+ if node.parent is None:
+ return u""
+ # The first three children of a suite are NEWLINE, INDENT, (some other node)
+ # INDENT.value contains the indentation for this suite
+ # anything after (some other node) has the indentation as its prefix.
+ if node.type == token.INDENT:
+ return node.value
+ elif node.prev_sibling is not None and node.prev_sibling.type == token.INDENT:
+ return node.prev_sibling.value
+ elif node.prev_sibling is None:
+ return u""
+ else:
+ return node.prefix
+
+def indentation_step(node):
+ """
+ Dirty little trick to get the difference between each indentation level
+ Implemented by finding the shortest indentation string
+ (technically, the "least" of all of the indentation strings, but
+ tabs and spaces mixed won't get this far, so those are synonymous.)
+ """
+ r = find_root(node)
+ # Collect all indentations into one set.
+ all_indents = set(i.value for i in r.pre_order() if i.type == token.INDENT)
+ if not all_indents:
+ # nothing is indented anywhere, so we get to pick what we want
+        return u"    "  # four spaces is a popular convention
+ else:
+ return min(all_indents)
+
+def suitify(parent):
+ """
+ Turn the stuff after the first colon in parent's children
+ into a suite, if it wasn't already
+ """
+ for node in parent.children:
+ if node.type == syms.suite:
+            # already in the preferred format, do nothing
+ return
+
+ # One-liners have no suite node, we have to fake one up
+ for i, node in enumerate(parent.children):
+ if node.type == token.COLON:
+ break
+ else:
+ raise ValueError(u"No class suite and no ':'!")
+ # Move everything into a suite node
+ suite = Node(syms.suite, [Newline(), Leaf(token.INDENT, indentation(node) + indentation_step(node))])
+ one_node = parent.children[i+1]
+ one_node.remove()
+ one_node.prefix = u''
+ suite.append_child(one_node)
+ parent.append_child(suite)
+
+def NameImport(package, as_name=None, prefix=None):
+ """
+    Accepts a package (Name node), name to import it as (string), and
+    optional prefix and returns a node:
+    import <package> [as <as_name>]
+ """
+ if prefix is None:
+ prefix = u""
+ children = [Name(u"import", prefix=prefix), package]
+ if as_name is not None:
+ children.extend([Name(u"as", prefix=u" "),
+ Name(as_name, prefix=u" ")])
+ return Node(syms.import_name, children)
+
+_compound_stmts = (syms.if_stmt, syms.while_stmt, syms.for_stmt, syms.try_stmt, syms.with_stmt)
+_import_stmts = (syms.import_name, syms.import_from)
+
+def import_binding_scope(node):
+ """
+ Generator yields all nodes for which a node (an import_stmt) has scope
+ The purpose of this is for a call to _find() on each of them
+ """
+ # import_name / import_from are small_stmts
+ assert node.type in _import_stmts
+ test = node.next_sibling
+ # A small_stmt can only be followed by a SEMI or a NEWLINE.
+ while test.type == token.SEMI:
+ nxt = test.next_sibling
+ # A SEMI can only be followed by a small_stmt or a NEWLINE
+ if nxt.type == token.NEWLINE:
+ break
+ else:
+ yield nxt
+ # A small_stmt can only be followed by either a SEMI or a NEWLINE
+ test = nxt.next_sibling
+ # Covered all subsequent small_stmts after the import_stmt
+ # Now to cover all subsequent stmts after the parent simple_stmt
+ parent = node.parent
+ assert parent.type == syms.simple_stmt
+ test = parent.next_sibling
+ while test is not None:
+ # Yes, this will yield NEWLINE and DEDENT. Deal with it.
+ yield test
+ test = test.next_sibling
+
+ context = parent.parent
+ # Recursively yield nodes following imports inside of a if/while/for/try/with statement
+ if context.type in _compound_stmts:
+ # import is in a one-liner
+ c = context
+ while c.next_sibling is not None:
+ yield c.next_sibling
+ c = c.next_sibling
+ context = context.parent
+
+ # Can't chain one-liners on one line, so that takes care of that.
+
+ p = context.parent
+ if p is None:
+ return
+
+ # in a multi-line suite
+
+ while p.type in _compound_stmts:
+
+ if context.type == syms.suite:
+ yield context
+
+ context = context.next_sibling
+
+ if context is None:
+ context = p.parent
+ p = context.parent
+ if p is None:
+ break
+
+def ImportAsName(name, as_name, prefix=None):
+ new_name = Name(name)
+ new_as = Name(u"as", prefix=u" ")
+ new_as_name = Name(as_name, prefix=u" ")
+ new_node = Node(syms.import_as_name, [new_name, new_as, new_as_name])
+ if prefix is not None:
+ new_node.prefix = prefix
+ return new_node
+
+
+def is_docstring(node):
+ """
+ Returns True if the node appears to be a docstring
+ """
+ return (node.type == syms.simple_stmt and
+ len(node.children) > 0 and node.children[0].type == token.STRING)
+
+
+def future_import(feature, node):
+ """
+ This seems to work
+ """
+ root = find_root(node)
+
+ if does_tree_import(u"__future__", feature, node):
+ return
+
+ # Look for a shebang or encoding line
+ shebang_encoding_idx = None
+
+ for idx, node in enumerate(root.children):
+ # Is it a shebang or encoding line?
+ if is_shebang_comment(node) or is_encoding_comment(node):
+ shebang_encoding_idx = idx
+ if is_docstring(node):
+ # skip over docstring
+ continue
+ names = check_future_import(node)
+ if not names:
+ # not a future statement; need to insert before this
+ break
+ if feature in names:
+ # already imported
+ return
+
+ import_ = FromImport(u'__future__', [Leaf(token.NAME, feature, prefix=" ")])
+ if shebang_encoding_idx == 0 and idx == 0:
+ # If this __future__ import would go on the first line,
+ # detach the shebang / encoding prefix from the current first line.
+ # and attach it to our new __future__ import node.
+ import_.prefix = root.children[0].prefix
+ root.children[0].prefix = u''
+ # End the __future__ import line with a newline and add a blank line
+ # afterwards:
+ children = [import_ , Newline()]
+ root.insert_child(idx, Node(syms.simple_stmt, children))
+
+
+def future_import2(feature, node):
+ """
+ An alternative to future_import() which might not work ...
+ """
+ root = find_root(node)
+
+ if does_tree_import(u"__future__", feature, node):
+ return
+
+ insert_pos = 0
+ for idx, node in enumerate(root.children):
+ if node.type == syms.simple_stmt and node.children and \
+ node.children[0].type == token.STRING:
+ insert_pos = idx + 1
+ break
+
+ for thing_after in root.children[insert_pos:]:
+ if thing_after.type == token.NEWLINE:
+ insert_pos += 1
+ continue
+
+ prefix = thing_after.prefix
+ thing_after.prefix = u""
+ break
+ else:
+ prefix = u""
+
+ import_ = FromImport(u"__future__", [Leaf(token.NAME, feature, prefix=u" ")])
+
+ children = [import_, Newline()]
+ root.insert_child(insert_pos, Node(syms.simple_stmt, children, prefix=prefix))
+
+def parse_args(arglist, scheme):
+ u"""
+ Parse a list of arguments into a dict
+ """
+ arglist = [i for i in arglist if i.type != token.COMMA]
+
+ ret_mapping = dict([(k, None) for k in scheme])
+
+ for i, arg in enumerate(arglist):
+ if arg.type == syms.argument and arg.children[1].type == token.EQUAL:
+ # argument < NAME '=' any >
+ slot = arg.children[0].value
+ ret_mapping[slot] = arg.children[2]
+ else:
+ slot = scheme[i]
+ ret_mapping[slot] = arg
+
+ return ret_mapping
+
+
+# def is_import_from(node):
+# """Returns true if the node is a statement "from ... import ..."
+# """
+# return node.type == syms.import_from
+
+
+def is_import_stmt(node):
+ return (node.type == syms.simple_stmt and node.children and
+ is_import(node.children[0]))
+
+
+def touch_import_top(package, name_to_import, node):
+ """Works like `does_tree_import` but adds an import statement at the
+    top if it was not imported (but below any __future__ imports and below
+    any comments such as shebang lines).
+
+ Based on lib2to3.fixer_util.touch_import()
+
+ Calling this multiple times adds the imports in reverse order.
+
+ Also adds "standard_library.install_aliases()" after "from future import
+ standard_library". This should probably be factored into another function.
+ """
+
+ root = find_root(node)
+
+ if does_tree_import(package, name_to_import, root):
+ return
+
+ # Ideally, we would look for whether futurize --all-imports has been run,
+ # as indicated by the presence of ``from builtins import (ascii, ...,
+ # zip)`` -- and, if it has, we wouldn't import the name again.
+
+ # Look for __future__ imports and insert below them
+ found = False
+ for name in ['absolute_import', 'division', 'print_function',
+ 'unicode_literals']:
+ if does_tree_import('__future__', name, root):
+ found = True
+ break
+ if found:
+ # At least one __future__ import. We want to loop until we've seen them
+ # all.
+ start, end = None, None
+ for idx, node in enumerate(root.children):
+ if check_future_import(node):
+ start = idx
+ # Start looping
+ idx2 = start
+ while node:
+ node = node.next_sibling
+ idx2 += 1
+ if not check_future_import(node):
+ end = idx2
+ break
+ break
+ assert start is not None
+ assert end is not None
+ insert_pos = end
+ else:
+ # No __future__ imports.
+ # We look for a docstring and insert the new node below that. If no docstring
+ # exists, just insert the node at the top.
+ for idx, node in enumerate(root.children):
+ if node.type != syms.simple_stmt:
+ break
+ if not is_docstring(node):
+ # This is the usual case.
+ break
+ insert_pos = idx
+
+ if package is None:
+ import_ = Node(syms.import_name, [
+ Leaf(token.NAME, u"import"),
+ Leaf(token.NAME, name_to_import, prefix=u" ")
+ ])
+ else:
+ import_ = FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")])
+ if name_to_import == u'standard_library':
+ # Add:
+ # standard_library.install_aliases()
+ # after:
+ # from future import standard_library
+ install_hooks = Node(syms.simple_stmt,
+ [Node(syms.power,
+ [Leaf(token.NAME, u'standard_library'),
+ Node(syms.trailer, [Leaf(token.DOT, u'.'),
+ Leaf(token.NAME, u'install_aliases')]),
+ Node(syms.trailer, [Leaf(token.LPAR, u'('),
+ Leaf(token.RPAR, u')')])
+ ])
+ ]
+ )
+ children_hooks = [install_hooks, Newline()]
+ else:
+ children_hooks = []
+
+ # FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")])
+
+ children_import = [import_, Newline()]
+ old_prefix = root.children[insert_pos].prefix
+ root.children[insert_pos].prefix = u''
+ root.insert_child(insert_pos, Node(syms.simple_stmt, children_import, prefix=old_prefix))
+ if len(children_hooks) > 0:
+ root.insert_child(insert_pos + 1, Node(syms.simple_stmt, children_hooks))
+
+
+## The following functions are from python-modernize by Armin Ronacher:
+# (a little edited).
+
+def check_future_import(node):
+ """If this is a future import, return set of symbols that are imported,
+ else return None."""
+ # node should be the import statement here
+ savenode = node
+ if not (node.type == syms.simple_stmt and node.children):
+ return set()
+ node = node.children[0]
+ # now node is the import_from node
+ if not (node.type == syms.import_from and
+ # node.type == token.NAME and # seems to break it
+ hasattr(node.children[1], 'value') and
+ node.children[1].value == u'__future__'):
+ return set()
+ if node.children[3].type == token.LPAR:
+ node = node.children[4]
+ else:
+ node = node.children[3]
+ # now node is the import_as_name[s]
+ # print(python_grammar.number2symbol[node.type]) # breaks sometimes
+ if node.type == syms.import_as_names:
+ result = set()
+ for n in node.children:
+ if n.type == token.NAME:
+ result.add(n.value)
+ elif n.type == syms.import_as_name:
+ n = n.children[0]
+ assert n.type == token.NAME
+ result.add(n.value)
+ return result
+ elif node.type == syms.import_as_name:
+ node = node.children[0]
+ assert node.type == token.NAME
+ return set([node.value])
+ elif node.type == token.NAME:
+ return set([node.value])
+ else:
+ # TODO: handle brackets like this:
+ # from __future__ import (absolute_import, division)
+ assert False, "strange import: %s" % savenode
+
+
+SHEBANG_REGEX = r'^#!.*python'
+ENCODING_REGEX = r"^#.*coding[:=]\s*([-\w.]+)"
+
+
+def is_shebang_comment(node):
+ """
+    Comments are prefixes for Leaf nodes. Returns whether the given node has a
+    prefix that looks like a shebang line:
+
+ #!/usr/bin/env python
+ #!/usr/bin/python3
+ """
+ return bool(re.match(SHEBANG_REGEX, node.prefix))
+
+
+def is_encoding_comment(node):
+ """
+ Comments are prefixes for Leaf nodes. Returns whether the given node has a
+ prefix that looks like an encoding line:
+
+ # coding: utf-8
+ # encoding: utf-8
+ # -*- coding: -*-
+ # vim: set fileencoding= :
+ """
+ return bool(re.match(ENCODING_REGEX, node.prefix))
+
+
+def wrap_in_fn_call(fn_name, args, prefix=None):
+ """
+ Example:
+ >>> wrap_in_fn_call("oldstr", (arg,))
+ oldstr(arg)
+
+ >>> wrap_in_fn_call("olddiv", (arg1, arg2))
+ olddiv(arg1, arg2)
+
+ >>> wrap_in_fn_call("olddiv", [arg1, comma, arg2, comma, arg3])
+ olddiv(arg1, arg2, arg3)
+ """
+ assert len(args) > 0
+ if len(args) == 2:
+ expr1, expr2 = args
+ newargs = [expr1, Comma(), expr2]
+ else:
+ newargs = args
+ return Call(Name(fn_name), newargs, prefix=prefix)
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/__init__.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/__init__.py
new file mode 100644
index 000000000..7de304da7
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/__init__.py
@@ -0,0 +1,96 @@
+import sys
+from lib2to3 import refactor
+
+# The following fixers are "safe": they convert Python 2 code to more
+# modern Python 2 code. They should be uncontroversial to apply to most
+# projects that are happy to drop support for Py2.5 and below. Applying
+# them first will reduce the size of the patch set for the real porting.
+lib2to3_fix_names_stage1 = set([
+ 'lib2to3.fixes.fix_apply',
+ 'lib2to3.fixes.fix_except',
+ 'lib2to3.fixes.fix_exec',
+ 'lib2to3.fixes.fix_exitfunc',
+ 'lib2to3.fixes.fix_funcattrs',
+ 'lib2to3.fixes.fix_has_key',
+ 'lib2to3.fixes.fix_idioms',
+ # 'lib2to3.fixes.fix_import', # makes any implicit relative imports explicit. (Use with ``from __future__ import absolute_import)
+ 'lib2to3.fixes.fix_intern',
+ 'lib2to3.fixes.fix_isinstance',
+ 'lib2to3.fixes.fix_methodattrs',
+ 'lib2to3.fixes.fix_ne',
+ # 'lib2to3.fixes.fix_next', # would replace ``next`` method names
+ # with ``__next__``.
+ 'lib2to3.fixes.fix_numliterals', # turns 1L into 1, 0755 into 0o755
+ 'lib2to3.fixes.fix_paren',
+ # 'lib2to3.fixes.fix_print', # see the libfuturize fixer that also
+ # adds ``from __future__ import print_function``
+ # 'lib2to3.fixes.fix_raise', # uses incompatible with_traceback() method on exceptions
+ 'lib2to3.fixes.fix_reduce', # reduce is available in functools on Py2.6/Py2.7
+ 'lib2to3.fixes.fix_renames', # sys.maxint -> sys.maxsize
+ # 'lib2to3.fixes.fix_set_literal', # this is unnecessary and breaks Py2.6 support
+ 'lib2to3.fixes.fix_repr',
+ 'lib2to3.fixes.fix_standarderror',
+ 'lib2to3.fixes.fix_sys_exc',
+ 'lib2to3.fixes.fix_throw',
+ 'lib2to3.fixes.fix_tuple_params',
+ 'lib2to3.fixes.fix_types',
+ 'lib2to3.fixes.fix_ws_comma', # can perhaps decrease readability: see issue #58
+ 'lib2to3.fixes.fix_xreadlines',
+])
+
+# The following fixers add a dependency on the ``future`` package in order to
+# support Python 2:
+lib2to3_fix_names_stage2 = set([
+ # 'lib2to3.fixes.fix_buffer', # perhaps not safe. Test this.
+ # 'lib2to3.fixes.fix_callable', # not needed in Py3.2+
+ 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. and move to stage2
+ # 'lib2to3.fixes.fix_execfile', # some problems: see issue #37.
+ # We use a custom fixer instead (see below)
+ # 'lib2to3.fixes.fix_future', # we don't want to remove __future__ imports
+ 'lib2to3.fixes.fix_getcwdu',
+ # 'lib2to3.fixes.fix_imports', # called by libfuturize.fixes.fix_future_standard_library
+ # 'lib2to3.fixes.fix_imports2', # we don't handle this yet (dbm)
+ 'lib2to3.fixes.fix_input',
+ 'lib2to3.fixes.fix_itertools',
+ 'lib2to3.fixes.fix_itertools_imports',
+ 'lib2to3.fixes.fix_filter',
+ 'lib2to3.fixes.fix_long',
+ 'lib2to3.fixes.fix_map',
+ # 'lib2to3.fixes.fix_metaclass', # causes SyntaxError in Py2! Use the one from ``six`` instead
+ 'lib2to3.fixes.fix_next',
+ 'lib2to3.fixes.fix_nonzero', # TODO: cause this to import ``object`` and/or add a decorator for mapping __bool__ to __nonzero__
+ 'lib2to3.fixes.fix_operator', # we will need support for this by e.g. extending the Py2 operator module to provide those functions in Py3
+ 'lib2to3.fixes.fix_raw_input',
+ # 'lib2to3.fixes.fix_unicode', # strips off the u'' prefix, which removes a potentially helpful source of information for disambiguating unicode/byte strings
+ # 'lib2to3.fixes.fix_urllib', # included in libfuturize.fix_future_standard_library_urllib
+ # 'lib2to3.fixes.fix_xrange', # custom one because of a bug with Py3.3's lib2to3
+ 'lib2to3.fixes.fix_zip',
+])
+
+libfuturize_fix_names_stage1 = set([
+ 'libfuturize.fixes.fix_absolute_import',
+ 'libfuturize.fixes.fix_next_call', # obj.next() -> next(obj). Unlike
+ # lib2to3.fixes.fix_next, doesn't change
+ # the ``next`` method to ``__next__``.
+ 'libfuturize.fixes.fix_print_with_import',
+ 'libfuturize.fixes.fix_raise',
+ # 'libfuturize.fixes.fix_order___future__imports', # TODO: consolidate to a single line to simplify testing
+])
+
+libfuturize_fix_names_stage2 = set([
+ 'libfuturize.fixes.fix_basestring',
+ # 'libfuturize.fixes.fix_add__future__imports_except_unicode_literals', # just in case
+ 'libfuturize.fixes.fix_cmp',
+ 'libfuturize.fixes.fix_division_safe',
+ 'libfuturize.fixes.fix_execfile',
+ 'libfuturize.fixes.fix_future_builtins',
+ 'libfuturize.fixes.fix_future_standard_library',
+ 'libfuturize.fixes.fix_future_standard_library_urllib',
+ 'libfuturize.fixes.fix_metaclass',
+ 'libpasteurize.fixes.fix_newstyle',
+ 'libfuturize.fixes.fix_object',
+ # 'libfuturize.fixes.fix_order___future__imports', # TODO: consolidate to a single line to simplify testing
+ 'libfuturize.fixes.fix_unicode_keep_u',
+ # 'libfuturize.fixes.fix_unicode_literals_import',
+ 'libfuturize.fixes.fix_xrange_with_import', # custom one because of a bug with Py3.3's lib2to3
+])
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_UserDict.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_UserDict.py
new file mode 100644
index 000000000..cb0cfacc6
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_UserDict.py
@@ -0,0 +1,102 @@
+"""Fix UserDict.
+
+Incomplete!
+
+TODO: base this on fix_urllib perhaps?
+"""
+
+
+# Local imports
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Name, attr_chain
+from lib2to3.fixes.fix_imports import alternates, build_pattern, FixImports
+
+MAPPING = {'UserDict': 'collections',
+}
+
+# def alternates(members):
+# return "(" + "|".join(map(repr, members)) + ")"
+#
+#
+# def build_pattern(mapping=MAPPING):
+# mod_list = ' | '.join(["module_name='%s'" % key for key in mapping])
+# bare_names = alternates(mapping.keys())
+#
+# yield """name_import=import_name< 'import' ((%s) |
+# multiple_imports=dotted_as_names< any* (%s) any* >) >
+# """ % (mod_list, mod_list)
+# yield """import_from< 'from' (%s) 'import' ['(']
+# ( any | import_as_name< any 'as' any > |
+# import_as_names< any* >) [')'] >
+# """ % mod_list
+# yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > |
+# multiple_imports=dotted_as_names<
+# any* dotted_as_name< (%s) 'as' any > any* >) >
+# """ % (mod_list, mod_list)
+#
+# # Find usages of module members in code e.g. thread.foo(bar)
+# yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names
+
+
+# class FixUserDict(fixer_base.BaseFix):
+class FixUserdict(FixImports):
+
+ BM_compatible = True
+ keep_line_order = True
+ # This is overridden in fix_imports2.
+ mapping = MAPPING
+
+ # We want to run this fixer late, so fix_import doesn't try to make stdlib
+ # renames into relative imports.
+ run_order = 6
+
+ def build_pattern(self):
+ return "|".join(build_pattern(self.mapping))
+
+ def compile_pattern(self):
+        # We override this, so MAPPING can be programmatically altered and the
+        # changes will be reflected in PATTERN.
+ self.PATTERN = self.build_pattern()
+ super(FixImports, self).compile_pattern()
+
+ # Don't match the node if it's within another match.
+ def match(self, node):
+ match = super(FixImports, self).match
+ results = match(node)
+ if results:
+ # Module usage could be in the trailer of an attribute lookup, so we
+ # might have nested matches when "bare_with_attr" is present.
+ if "bare_with_attr" not in results and \
+ any(match(obj) for obj in attr_chain(node, "parent")):
+ return False
+ return results
+ return False
+
+ def start_tree(self, tree, filename):
+ super(FixImports, self).start_tree(tree, filename)
+ self.replace = {}
+
+ def transform(self, node, results):
+ import_mod = results.get("module_name")
+ if import_mod:
+ mod_name = import_mod.value
+ new_name = unicode(self.mapping[mod_name])
+ import_mod.replace(Name(new_name, prefix=import_mod.prefix))
+ if "name_import" in results:
+            # If it's not a "from x import x, y" or "import x as y" import,
+            # mark its usage to be replaced.
+ self.replace[mod_name] = new_name
+ if "multiple_imports" in results:
+ # This is a nasty hack to fix multiple imports on a line (e.g.,
+ # "import StringIO, urlparse"). The problem is that I can't
+ # figure out an easy way to make a pattern recognize the keys of
+ # MAPPING randomly sprinkled in an import statement.
+ results = self.match(node)
+ if results:
+ self.transform(node, results)
+ else:
+ # Replace usage of the module.
+ bare_name = results["bare_with_attr"][0]
+ new_name = self.replace.get(bare_name.value)
+ if new_name:
+ bare_name.replace(Name(new_name, prefix=bare_name.prefix))
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_absolute_import.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_absolute_import.py
new file mode 100644
index 000000000..eab9c527d
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_absolute_import.py
@@ -0,0 +1,91 @@
+"""
+Fixer for import statements, with a __future__ import line.
+
+Based on lib2to3/fixes/fix_import.py, but extended slightly so it also
+supports Cython modules.
+
+If spam is being imported from the local directory, this import:
+ from spam import eggs
+becomes:
+ from __future__ import absolute_import
+ from .spam import eggs
+
+and this import:
+ import spam
+becomes:
+ from __future__ import absolute_import
+ from . import spam
+"""
+
+from os.path import dirname, join, exists, sep
+from lib2to3.fixes.fix_import import FixImport
+from lib2to3.fixer_util import FromImport, syms
+from lib2to3.fixes.fix_import import traverse_imports
+
+from libfuturize.fixer_util import future_import
+
+
+class FixAbsoluteImport(FixImport):
+ run_order = 9
+
+ def transform(self, node, results):
+ """
+ Copied from FixImport.transform(), but with this line added in
+ any modules that had implicit relative imports changed:
+
+ from __future__ import absolute_import"
+ """
+ if self.skip:
+ return
+ imp = results['imp']
+
+ if node.type == syms.import_from:
+ # Some imps are top-level (eg: 'import ham')
+ # some are first level (eg: 'import ham.eggs')
+ # some are third level (eg: 'import ham.eggs as spam')
+ # Hence, the loop
+ while not hasattr(imp, 'value'):
+ imp = imp.children[0]
+ if self.probably_a_local_import(imp.value):
+ imp.value = u"." + imp.value
+ imp.changed()
+ future_import(u"absolute_import", node)
+ else:
+ have_local = False
+ have_absolute = False
+ for mod_name in traverse_imports(imp):
+ if self.probably_a_local_import(mod_name):
+ have_local = True
+ else:
+ have_absolute = True
+ if have_absolute:
+ if have_local:
+ # We won't handle both sibling and absolute imports in the
+ # same statement at the moment.
+ self.warning(node, "absolute and local imports together")
+ return
+
+ new = FromImport(u".", [imp])
+ new.prefix = node.prefix
+ future_import(u"absolute_import", node)
+ return new
+
+ def probably_a_local_import(self, imp_name):
+ """
+ Like the corresponding method in the base class, but this also
+ supports Cython modules.
+ """
+ if imp_name.startswith(u"."):
+ # Relative imports are certainly not local imports.
+ return False
+ imp_name = imp_name.split(u".", 1)[0]
+ base_path = dirname(self.filename)
+ base_path = join(base_path, imp_name)
+        # If there is no __init__.py next to the file, it's not in a package
+        # so it can't be a relative import.
+ if not exists(join(dirname(base_path), "__init__.py")):
+ return False
+ for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]:
+ if exists(base_path + ext):
+ return True
+ return False
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py
new file mode 100644
index 000000000..1d419a1c6
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py
@@ -0,0 +1,26 @@
+"""
+Fixer for adding:
+
+ from __future__ import absolute_import
+ from __future__ import division
+ from __future__ import print_function
+
+This is "stage 1": hopefully uncontroversial changes.
+
+Stage 2 adds ``unicode_literals``.
+"""
+
+from lib2to3 import fixer_base
+from libfuturize.fixer_util import future_import
+
+class FixAddFutureImportsExceptUnicodeLiterals(fixer_base.BaseFix):
+ BM_compatible = True
+ PATTERN = "file_input"
+
+ run_order = 9
+
+ def transform(self, node, results):
+ # Reverse order:
+ future_import(u"print_function", node)
+ future_import(u"division", node)
+ future_import(u"absolute_import", node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_basestring.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_basestring.py
new file mode 100644
index 000000000..5676d08fc
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_basestring.py
@@ -0,0 +1,17 @@
+"""
+Fixer that adds ``from past.builtins import basestring`` if there is a
+reference to ``basestring``
+"""
+
+from lib2to3 import fixer_base
+
+from libfuturize.fixer_util import touch_import_top
+
+
+class FixBasestring(fixer_base.BaseFix):
+ BM_compatible = True
+
+ PATTERN = "'basestring'"
+
+ def transform(self, node, results):
+ touch_import_top(u'past.builtins', 'basestring', node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_bytes.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_bytes.py
new file mode 100644
index 000000000..42021223a
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_bytes.py
@@ -0,0 +1,24 @@
+"""Optional fixer that changes all unprefixed string literals "..." to b"...".
+
+br'abcd' is a SyntaxError on Python 2 but valid on Python 3.
+ur'abcd' is a SyntaxError on Python 3 but valid on Python 2.
+
+"""
+from __future__ import unicode_literals
+
+import re
+from lib2to3.pgen2 import token
+from lib2to3 import fixer_base
+
+_literal_re = re.compile(r"[^bBuUrR]?[\'\"]")
+
+class FixBytes(fixer_base.BaseFix):
+ BM_compatible = True
+ PATTERN = "STRING"
+
+ def transform(self, node, results):
+ if node.type == token.STRING:
+ if _literal_re.match(node.value):
+ new = node.clone()
+ new.value = u'b' + new.value
+ return new
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_cmp.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_cmp.py
new file mode 100644
index 000000000..762eb4b42
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_cmp.py
@@ -0,0 +1,33 @@
+# coding: utf-8
+"""
+Fixer for the cmp() function on Py2, which was removed in Py3.
+
+Adds this import line::
+
+ from past.builtins import cmp
+
+if cmp() is called in the code.
+"""
+
+from __future__ import unicode_literals
+from lib2to3 import fixer_base
+
+from libfuturize.fixer_util import touch_import_top
+
+
+expression = "name='cmp'"
+
+
+class FixCmp(fixer_base.BaseFix):
+ BM_compatible = True
+ run_order = 9
+
+ PATTERN = """
+ power<
+ ({0}) trailer< '(' args=[any] ')' >
+ rest=any* >
+ """.format(expression)
+
+ def transform(self, node, results):
+ name = results["name"]
+ touch_import_top(u'past.builtins', name.value, node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_division.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_division.py
new file mode 100644
index 000000000..6975a52bb
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_division.py
@@ -0,0 +1,12 @@
+"""
+UNFINISHED
+For the ``future`` package.
+
+Adds this import line:
+
+ from __future__ import division
+
+at the top so the code runs identically on Py3 and Py2.6/2.7
+"""
+
+from libpasteurize.fixes.fix_division import FixDivision
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_division_safe.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_division_safe.py
new file mode 100644
index 000000000..7b0f3cbd7
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_division_safe.py
@@ -0,0 +1,109 @@
+"""
+For the ``future`` package.
+
+Adds this import line:
+
+ from __future__ import division
+
+at the top and changes any old-style divisions to be calls to
+past.utils.old_div so the code runs as before on Py2.6/2.7 and has the same
+behaviour on Py3.
+
+If "from __future__ import division" is already in effect, this fixer does
+nothing.
+"""
+
+import re
+import lib2to3.pytree as pytree
+from lib2to3.fixer_util import Leaf, Node, Comma
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import syms, does_tree_import
+from libfuturize.fixer_util import (token, future_import, touch_import_top,
+ wrap_in_fn_call)
+
+
+def match_division(node):
+ u"""
+ __future__.division redefines the meaning of a single slash for division,
+ so we match that and only that.
+ """
+ slash = token.SLASH
+ return node.type == slash and not node.next_sibling.type == slash and \
+ not node.prev_sibling.type == slash
+
+const_re = re.compile('^[0-9]*[.][0-9]*$')
+
+def is_floaty(node, div_idx):
+ return _is_floaty(node.children[0:div_idx]) or _is_floaty(node.children[div_idx+1:])
+
+
+def _is_floaty(expr):
+ if isinstance(expr, list):
+ expr = expr[0]
+
+ if isinstance(expr, Leaf):
+ # If it's a leaf, let's see if it's a numeric constant containing a '.'
+ return const_re.match(expr.value)
+ elif isinstance(expr, Node):
+ # If the expression is a node, let's see if it's a direct cast to float
+ if isinstance(expr.children[0], Leaf):
+ return expr.children[0].value == u'float'
+ return False
+
+def find_division(node):
+ for i, child in enumerate(node.children):
+ if match_division(child):
+ return i
+ return False
+
+def clone_div_operands(node, div_idx):
+ children = []
+ for i, child in enumerate(node.children):
+ if i == div_idx:
+ children.append(Comma())
+ else:
+ children.append(child.clone())
+
+ # Strip any leading space for the first number:
+ children[0].prefix = u''
+
+ return children
+
+class FixDivisionSafe(fixer_base.BaseFix):
+ # BM_compatible = True
+ run_order = 4 # this seems to be ignored?
+
+ _accept_type = token.SLASH
+
+ PATTERN = """
+ term<(not('/') any)+ '/' ((not('/') any))>
+ """
+
+ def start_tree(self, tree, name):
+ """
+ Skip this fixer if "__future__.division" is already imported.
+ """
+ super(FixDivisionSafe, self).start_tree(tree, name)
+ self.skip = "division" in tree.future_features
+
+ def match(self, node):
+ u"""
+ Since the tree needs to be fixed once and only once if and only if it
+ matches, we can start discarding matches after the first.
+ """
+ if node.type == self.syms.term:
+ div_idx = find_division(node)
+ if div_idx is not False:
+ # if expr1 or expr2 are obviously floats, we don't need to wrap in
+ # old_div, as the behavior of division between any number and a float
+ # should be the same in 2 or 3
+ if not is_floaty(node, div_idx):
+ return clone_div_operands(node, div_idx)
+ return False
+
+ def transform(self, node, results):
+ if self.skip:
+ return
+ future_import(u"division", node)
+ touch_import_top(u'past.utils', u'old_div', node)
+ return wrap_in_fn_call("old_div", results, prefix=node.prefix)
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_execfile.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_execfile.py
new file mode 100644
index 000000000..cfe9d8d0f
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_execfile.py
@@ -0,0 +1,37 @@
+# coding: utf-8
+"""
+Fixer for the execfile() function on Py2, which was removed in Py3.
+
+The Lib/lib2to3/fixes/fix_execfile.py module has some problems: see
+python-future issue #37. This fixer merely imports execfile() from
+past.builtins and leaves the code alone.
+
+Adds this import line::
+
+ from past.builtins import execfile
+
+for the function execfile() that was removed from Py3.
+"""
+
+from __future__ import unicode_literals
+from lib2to3 import fixer_base
+
+from libfuturize.fixer_util import touch_import_top
+
+
+expression = "name='execfile'"
+
+
+class FixExecfile(fixer_base.BaseFix):
+ BM_compatible = True
+ run_order = 9
+
+ PATTERN = """
+ power<
+ ({0}) trailer< '(' args=[any] ')' >
+ rest=any* >
+ """.format(expression)
+
+ def transform(self, node, results):
+ name = results["name"]
+ touch_import_top(u'past.builtins', name.value, node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_builtins.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_builtins.py
new file mode 100644
index 000000000..eea6c6a1e
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_builtins.py
@@ -0,0 +1,59 @@
+"""
+For the ``future`` package.
+
+Adds this import line::
+
+ from builtins import XYZ
+
+for each of the functions XYZ that is used in the module.
+
+Adds these imports after any other imports (in an initial block of them).
+"""
+
+from __future__ import unicode_literals
+
+from lib2to3 import fixer_base
+from lib2to3.pygram import python_symbols as syms
+from lib2to3.fixer_util import Name, Call, in_special_context
+
+from libfuturize.fixer_util import touch_import_top
+
+# All builtins are:
+# from future.builtins.iterators import (filter, map, zip)
+# from future.builtins.misc import (ascii, chr, hex, input, isinstance, oct, open, round, super)
+# from future.types import (bytes, dict, int, range, str)
+# We don't need isinstance any more.
+
+replaced_builtin_fns = '''filter map zip
+ ascii chr hex input next oct
+ bytes range str raw_input'''.split()
+ # This includes raw_input as a workaround for the
+ # lib2to3 fixer for raw_input on Py3 (only), allowing
+ # the correct import to be included. (Py3 seems to run
+ # the fixers the wrong way around, perhaps ignoring the
+ # run_order class attribute below ...)
+
+expression = '|'.join(["name='{0}'".format(name) for name in replaced_builtin_fns])
+
+
+class FixFutureBuiltins(fixer_base.BaseFix):
+ BM_compatible = True
+ run_order = 7
+
+ # Currently we only match uses as a function. This doesn't match e.g.:
+ # if isinstance(s, str):
+ # ...
+ PATTERN = """
+ power<
+ ({0}) trailer< '(' [arglist=any] ')' >
+ rest=any* >
+ |
+ power<
+ 'map' trailer< '(' [arglist=any] ')' >
+ >
+ """.format(expression)
+
+ def transform(self, node, results):
+ name = results["name"]
+ touch_import_top(u'builtins', name.value, node)
+ # name.replace(Name(u"input", prefix=name.prefix))
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_standard_library.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_standard_library.py
new file mode 100644
index 000000000..a1c3f3d4e
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_standard_library.py
@@ -0,0 +1,24 @@
+"""
+For the ``future`` package.
+
+Changes any imports needed to reflect the standard library reorganization.
+Also adds these import lines:
+
+ from future import standard_library
+ standard_library.install_aliases()
+
+after any __future__ imports but before any other imports.
+"""
+
+from lib2to3.fixes.fix_imports import FixImports
+from libfuturize.fixer_util import touch_import_top
+
+
+class FixFutureStandardLibrary(FixImports):
+ run_order = 8
+
+ def transform(self, node, results):
+ result = super(FixFutureStandardLibrary, self).transform(node, results)
+ # TODO: add a blank line between any __future__ imports and this?
+ touch_import_top(u'future', u'standard_library', node)
+ return result
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_standard_library_urllib.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_standard_library_urllib.py
new file mode 100644
index 000000000..cf6738845
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_standard_library_urllib.py
@@ -0,0 +1,28 @@
+"""
+For the ``future`` package.
+
+A special fixer that ensures that these lines have been added::
+
+ from future import standard_library
+ standard_library.install_hooks()
+
+even if the only module imported was ``urllib``, in which case the regular fixer
+wouldn't have added these lines.
+
+"""
+
+from lib2to3.fixes.fix_urllib import FixUrllib
+from libfuturize.fixer_util import touch_import_top, find_root
+
+
+class FixFutureStandardLibraryUrllib(FixUrllib): # not a subclass of FixImports
+ run_order = 8
+
+ def transform(self, node, results):
+ # transform_member() in lib2to3/fixes/fix_urllib.py breaks node so find_root(node)
+ # no longer works after the super() call below. So we find the root first:
+ root = find_root(node)
+ result = super(FixFutureStandardLibraryUrllib, self).transform(node, results)
+ # TODO: add a blank line between any __future__ imports and this?
+ touch_import_top(u'future', u'standard_library', root)
+ return result
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_metaclass.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_metaclass.py
new file mode 100644
index 000000000..2ac41c972
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_metaclass.py
@@ -0,0 +1,262 @@
+# coding: utf-8
+"""Fixer for __metaclass__ = X -> (future.utils.with_metaclass(X)) methods.
+
+ The various forms of classdef (inherits nothing, inherits once, inherits
+ many) don't parse the same in the CST so we look at ALL classes for
+ a __metaclass__ and if we find one normalize the inherits to all be
+ an arglist.
+
+ For one-liner classes ('class X: pass') there is no indent/dedent so
+ we normalize those into having a suite.
+
+ Moving the __metaclass__ into the classdef can also cause the class
+ body to be empty so there is some special casing for that as well.
+
+ This fixer also tries very hard to keep original indenting and spacing
+ in all those corner cases.
+"""
+# This is a derived work of Lib/lib2to3/fixes/fix_metaclass.py under the
+# copyright of the Python Software Foundation, licensed under the Python
+# Software Foundation License 2.
+#
+# Copyright notice:
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+# 2011, 2012, 2013 Python Software Foundation. All rights reserved.
+#
+# Full license text: http://docs.python.org/3.4/license.html
+
+# Author: Jack Diederich, Daniel Neuhäuser
+
+# Local imports
+from lib2to3 import fixer_base
+from lib2to3.pygram import token
+from lib2to3.fixer_util import Name, syms, Node, Leaf, touch_import, Call, \
+ String, Comma, parenthesize
+
+
+def has_metaclass(parent):
+ """ we have to check the cls_node without changing it.
+ There are two possiblities:
+ 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta')
+ 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta')
+ """
+ for node in parent.children:
+ if node.type == syms.suite:
+ return has_metaclass(node)
+ elif node.type == syms.simple_stmt and node.children:
+ expr_node = node.children[0]
+ if expr_node.type == syms.expr_stmt and expr_node.children:
+ left_side = expr_node.children[0]
+ if isinstance(left_side, Leaf) and \
+ left_side.value == '__metaclass__':
+ return True
+ return False
+
+
+def fixup_parse_tree(cls_node):
+ """ one-line classes don't get a suite in the parse tree so we add
+ one to normalize the tree
+ """
+ for node in cls_node.children:
+ if node.type == syms.suite:
+ # already in the preferred format, do nothing
+ return
+
+ # !%@#! oneliners have no suite node, we have to fake one up
+ for i, node in enumerate(cls_node.children):
+ if node.type == token.COLON:
+ break
+ else:
+ raise ValueError("No class suite and no ':'!")
+
+ # move everything into a suite node
+ suite = Node(syms.suite, [])
+ while cls_node.children[i+1:]:
+ move_node = cls_node.children[i+1]
+ suite.append_child(move_node.clone())
+ move_node.remove()
+ cls_node.append_child(suite)
+ node = suite
+
+
+def fixup_simple_stmt(parent, i, stmt_node):
+ """ if there is a semi-colon all the parts count as part of the same
+ simple_stmt. We just want the __metaclass__ part so we move
+ everything after the semi-colon into its own simple_stmt node
+ """
+ for semi_ind, node in enumerate(stmt_node.children):
+ if node.type == token.SEMI: # *sigh*
+ break
+ else:
+ return
+
+ node.remove() # kill the semicolon
+ new_expr = Node(syms.expr_stmt, [])
+ new_stmt = Node(syms.simple_stmt, [new_expr])
+ while stmt_node.children[semi_ind:]:
+ move_node = stmt_node.children[semi_ind]
+ new_expr.append_child(move_node.clone())
+ move_node.remove()
+ parent.insert_child(i, new_stmt)
+ new_leaf1 = new_stmt.children[0].children[0]
+ old_leaf1 = stmt_node.children[0].children[0]
+ new_leaf1.prefix = old_leaf1.prefix
+
+
+def remove_trailing_newline(node):
+ if node.children and node.children[-1].type == token.NEWLINE:
+ node.children[-1].remove()
+
+
+def find_metas(cls_node):
+ # find the suite node (Mmm, sweet nodes)
+ for node in cls_node.children:
+ if node.type == syms.suite:
+ break
+ else:
+ raise ValueError("No class suite!")
+
+ # look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ]
+ for i, simple_node in list(enumerate(node.children)):
+ if simple_node.type == syms.simple_stmt and simple_node.children:
+ expr_node = simple_node.children[0]
+ if expr_node.type == syms.expr_stmt and expr_node.children:
+ # Check if the expr_node is a simple assignment.
+ left_node = expr_node.children[0]
+ if isinstance(left_node, Leaf) and \
+ left_node.value == u'__metaclass__':
+ # We found an assignment to __metaclass__.
+ fixup_simple_stmt(node, i, simple_node)
+ remove_trailing_newline(simple_node)
+ yield (node, i, simple_node)
+
+
+def fixup_indent(suite):
+ """ If an INDENT is followed by a thing with a prefix then nuke the prefix
+ Otherwise we get in trouble when removing __metaclass__ at suite start
+ """
+ kids = suite.children[::-1]
+ # find the first indent
+ while kids:
+ node = kids.pop()
+ if node.type == token.INDENT:
+ break
+
+ # find the first Leaf
+ while kids:
+ node = kids.pop()
+ if isinstance(node, Leaf) and node.type != token.DEDENT:
+ if node.prefix:
+ node.prefix = u''
+ return
+ else:
+ kids.extend(node.children[::-1])
+
+
+class FixMetaclass(fixer_base.BaseFix):
+ BM_compatible = True
+
+ PATTERN = """
+ classdef
+ """
+
+ def transform(self, node, results):
+ if not has_metaclass(node):
+ return
+
+ fixup_parse_tree(node)
+
+ # find metaclasses, keep the last one
+ last_metaclass = None
+ for suite, i, stmt in find_metas(node):
+ last_metaclass = stmt
+ stmt.remove()
+
+ text_type = node.children[0].type # always Leaf(nnn, 'class')
+
+ # figure out what kind of classdef we have
+ if len(node.children) == 7:
+ # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite])
+ # 0 1 2 3 4 5 6
+ if node.children[3].type == syms.arglist:
+ arglist = node.children[3]
+ # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite])
+ else:
+ parent = node.children[3].clone()
+ arglist = Node(syms.arglist, [parent])
+ node.set_child(3, arglist)
+ elif len(node.children) == 6:
+ # Node(classdef, ['class', 'name', '(', ')', ':', suite])
+ # 0 1 2 3 4 5
+ arglist = Node(syms.arglist, [])
+ node.insert_child(3, arglist)
+ elif len(node.children) == 4:
+ # Node(classdef, ['class', 'name', ':', suite])
+ # 0 1 2 3
+ arglist = Node(syms.arglist, [])
+ node.insert_child(2, Leaf(token.RPAR, u')'))
+ node.insert_child(2, arglist)
+ node.insert_child(2, Leaf(token.LPAR, u'('))
+ else:
+ raise ValueError("Unexpected class definition")
+
+ # now stick the metaclass in the arglist
+ meta_txt = last_metaclass.children[0].children[0]
+ meta_txt.value = 'metaclass'
+ orig_meta_prefix = meta_txt.prefix
+
+ # Was: touch_import(None, u'future.utils', node)
+ touch_import(u'future.utils', u'with_metaclass', node)
+
+ metaclass = last_metaclass.children[0].children[2].clone()
+ metaclass.prefix = u''
+
+ arguments = [metaclass]
+
+ if arglist.children:
+ if len(arglist.children) == 1:
+ base = arglist.children[0].clone()
+ base.prefix = u' '
+ else:
+ # Unfortunately six.with_metaclass() only allows one base
+ # class, so we have to dynamically generate a base class if
+ # there is more than one.
+ bases = parenthesize(arglist.clone())
+ bases.prefix = u' '
+ base = Call(Name('type'), [
+ String("'NewBase'"),
+ Comma(),
+ bases,
+ Comma(),
+ Node(
+ syms.atom,
+ [Leaf(token.LBRACE, u'{'), Leaf(token.RBRACE, u'}')],
+ prefix=u' '
+ )
+ ], prefix=u' ')
+ arguments.extend([Comma(), base])
+
+ arglist.replace(Call(
+ Name(u'with_metaclass', prefix=arglist.prefix),
+ arguments
+ ))
+
+ fixup_indent(suite)
+
+ # check for empty suite
+ if not suite.children:
+ # one-liner that was just __metaclass__
+ suite.remove()
+ pass_leaf = Leaf(text_type, u'pass')
+ pass_leaf.prefix = orig_meta_prefix
+ node.append_child(pass_leaf)
+ node.append_child(Leaf(token.NEWLINE, u'\n'))
+
+ elif len(suite.children) > 1 and \
+ (suite.children[-2].type == token.INDENT and
+ suite.children[-1].type == token.DEDENT):
+ # there was only one line in the class body and it was __metaclass__
+ pass_leaf = Leaf(text_type, u'pass')
+ suite.insert_child(-1, pass_leaf)
+ suite.insert_child(-1, Leaf(token.NEWLINE, u'\n'))
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_next_call.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_next_call.py
new file mode 100644
index 000000000..282f18522
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_next_call.py
@@ -0,0 +1,104 @@
+"""
+Based on fix_next.py by Collin Winter.
+
+Replaces it.next() -> next(it), per PEP 3114.
+
+Unlike fix_next.py, this fixer doesn't replace the name of a next method with __next__,
+which would break Python 2 compatibility without further help from fixers in
+stage 2.
+"""
+
+# Local imports
+from lib2to3.pgen2 import token
+from lib2to3.pygram import python_symbols as syms
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Name, Call, find_binding
+
+bind_warning = "Calls to builtin next() possibly shadowed by global binding"
+
+
+class FixNextCall(fixer_base.BaseFix):
+ BM_compatible = True
+ PATTERN = """
+ power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
+ |
+ power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > >
+ |
+ global=global_stmt< 'global' any* 'next' any* >
+ """
+
+ order = "pre" # Pre-order tree traversal
+
+ def start_tree(self, tree, filename):
+ super(FixNextCall, self).start_tree(tree, filename)
+
+ n = find_binding('next', tree)
+ if n:
+ self.warning(n, bind_warning)
+ self.shadowed_next = True
+ else:
+ self.shadowed_next = False
+
+ def transform(self, node, results):
+ assert results
+
+ base = results.get("base")
+ attr = results.get("attr")
+ name = results.get("name")
+
+ if base:
+ if self.shadowed_next:
+ # Omit this:
+ # attr.replace(Name("__next__", prefix=attr.prefix))
+ pass
+ else:
+ base = [n.clone() for n in base]
+ base[0].prefix = ""
+ node.replace(Call(Name("next", prefix=node.prefix), base))
+ elif name:
+ # Omit this:
+ # n = Name("__next__", prefix=name.prefix)
+ # name.replace(n)
+ pass
+ elif attr:
+ # We don't do this transformation if we're assigning to "x.next".
+ # Unfortunately, it doesn't seem possible to do this in PATTERN,
+ # so it's being done here.
+ if is_assign_target(node):
+ head = results["head"]
+ if "".join([str(n) for n in head]).strip() == '__builtin__':
+ self.warning(node, bind_warning)
+ return
+ # Omit this:
+ # attr.replace(Name("__next__"))
+ elif "global" in results:
+ self.warning(node, bind_warning)
+ self.shadowed_next = True
+
+
+### The following functions help test if node is part of an assignment
+### target.
+
+def is_assign_target(node):
+ assign = find_assign(node)
+ if assign is None:
+ return False
+
+ for child in assign.children:
+ if child.type == token.EQUAL:
+ return False
+ elif is_subtree(child, node):
+ return True
+ return False
+
+def find_assign(node):
+ if node.type == syms.expr_stmt:
+ return node
+ if node.type == syms.simple_stmt or node.parent is None:
+ return None
+ return find_assign(node.parent)
+
+def is_subtree(root, node):
+ if root == node:
+ return True
+ return any(is_subtree(c, node) for c in root.children)
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_object.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_object.py
new file mode 100644
index 000000000..accf2c52e
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_object.py
@@ -0,0 +1,17 @@
+"""
+Fixer that adds ``from builtins import object`` if there is a line
+like this:
+ class Foo(object):
+"""
+
+from lib2to3 import fixer_base
+
+from libfuturize.fixer_util import touch_import_top
+
+
+class FixObject(fixer_base.BaseFix):
+
+ PATTERN = u"classdef< 'class' NAME '(' name='object' ')' colon=':' any >"
+
+ def transform(self, node, results):
+ touch_import_top(u'builtins', 'object', node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_oldstr_wrap.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_oldstr_wrap.py
new file mode 100644
index 000000000..ad58771d5
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_oldstr_wrap.py
@@ -0,0 +1,39 @@
+"""
+For the ``future`` package.
+
+Adds this import line:
+
+ from past.builtins import str as oldstr
+
+at the top and wraps any unadorned string literals 'abc' or explicit byte-string
+literals b'abc' in oldstr() calls so the code has the same behaviour on Py3 as
+on Py2.6/2.7.
+"""
+
+from __future__ import unicode_literals
+import re
+from lib2to3 import fixer_base
+from lib2to3.pgen2 import token
+from lib2to3.fixer_util import syms
+from libfuturize.fixer_util import (future_import, touch_import_top,
+ wrap_in_fn_call)
+
+
+_literal_re = re.compile(r"[^uUrR]?[\'\"]")
+
+
+class FixOldstrWrap(fixer_base.BaseFix):
+ BM_compatible = True
+ PATTERN = "STRING"
+
+ def transform(self, node, results):
+ if node.type == token.STRING:
+ touch_import_top(u'past.types', u'oldstr', node)
+ if _literal_re.match(node.value):
+ new = node.clone()
+ # Strip any leading space or comments:
+ # TODO: check: do we really want to do this?
+ new.prefix = u''
+ new.value = u'b' + new.value
+ wrapped = wrap_in_fn_call("oldstr", [new], prefix=node.prefix)
+ return wrapped
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_order___future__imports.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_order___future__imports.py
new file mode 100644
index 000000000..00d7ef606
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_order___future__imports.py
@@ -0,0 +1,36 @@
+"""
+UNFINISHED
+
+Fixer for turning multiple lines like these:
+
+ from __future__ import division
+ from __future__ import absolute_import
+ from __future__ import print_function
+
+into a single line like this:
+
+ from __future__ import (absolute_import, division, print_function)
+
+This helps with testing of ``futurize``.
+"""
+
+from lib2to3 import fixer_base
+from libfuturize.fixer_util import future_import
+
+class FixOrderFutureImports(fixer_base.BaseFix):
+ BM_compatible = True
+ PATTERN = "file_input"
+
+ run_order = 10
+
+ # def match(self, node):
+ # """
+ # Match only once per file
+ # """
+ # if hasattr(node, 'type') and node.type == syms.file_input:
+ # return True
+ # return False
+
+ def transform(self, node, results):
+ # TODO # write me
+ pass
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_print.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_print.py
new file mode 100644
index 000000000..247b91b84
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_print.py
@@ -0,0 +1,94 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for print.
+
+Change:
+ "print" into "print()"
+ "print ..." into "print(...)"
+ "print(...)" not changed
+ "print ... ," into "print(..., end=' ')"
+ "print >>x, ..." into "print(..., file=x)"
+
+No changes are applied if print_function is imported from __future__
+
+"""
+
+# Local imports
+from lib2to3 import patcomp, pytree, fixer_base
+from lib2to3.pgen2 import token
+from lib2to3.fixer_util import Name, Call, Comma, String
+# from libmodernize import add_future
+
+parend_expr = patcomp.compile_pattern(
+ """atom< '(' [arith_expr|atom|power|term|STRING|NAME] ')' >"""
+ )
+
+
+class FixPrint(fixer_base.BaseFix):
+
+ BM_compatible = True
+
+ PATTERN = """
+ simple_stmt< any* bare='print' any* > | print_stmt
+ """
+
+ def transform(self, node, results):
+ assert results
+
+ bare_print = results.get("bare")
+
+ if bare_print:
+ # Special-case print all by itself.
+ bare_print.replace(Call(Name(u"print"), [],
+ prefix=bare_print.prefix))
+ # The "from __future__ import print_function"" declaration is added
+ # by the fix_print_with_import fixer, so we skip it here.
+ # add_future(node, u'print_function')
+ return
+ assert node.children[0] == Name(u"print")
+ args = node.children[1:]
+ if len(args) == 1 and parend_expr.match(args[0]):
+ # We don't want to keep sticking parens around an
+ # already-parenthesised expression.
+ return
+
+ sep = end = file = None
+ if args and args[-1] == Comma():
+ args = args[:-1]
+ end = " "
+ if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, u">>"):
+ assert len(args) >= 2
+ file = args[1].clone()
+ args = args[3:] # Strip a possible comma after the file expression
+ # Now synthesize a print(args, sep=..., end=..., file=...) node.
+ l_args = [arg.clone() for arg in args]
+ if l_args:
+ l_args[0].prefix = u""
+ if sep is not None or end is not None or file is not None:
+ if sep is not None:
+ self.add_kwarg(l_args, u"sep", String(repr(sep)))
+ if end is not None:
+ self.add_kwarg(l_args, u"end", String(repr(end)))
+ if file is not None:
+ self.add_kwarg(l_args, u"file", file)
+ n_stmt = Call(Name(u"print"), l_args)
+ n_stmt.prefix = node.prefix
+
+ # Note that there are corner cases where adding this future-import is
+ # incorrect, for example when the file also has a 'print ()' statement
+ # that was intended to print "()".
+ # add_future(node, u'print_function')
+ return n_stmt
+
+ def add_kwarg(self, l_nodes, s_kwd, n_expr):
+ # XXX All this prefix-setting may lose comments (though rarely)
+ n_expr.prefix = u""
+ n_argument = pytree.Node(self.syms.argument,
+ (Name(s_kwd),
+ pytree.Leaf(token.EQUAL, u"="),
+ n_expr))
+ if l_nodes:
+ l_nodes.append(Comma())
+ n_argument.prefix = u" "
+ l_nodes.append(n_argument)
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_print_with_import.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_print_with_import.py
new file mode 100644
index 000000000..344904610
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_print_with_import.py
@@ -0,0 +1,22 @@
+"""
+For the ``future`` package.
+
+Turns any print statements into functions and adds this import line:
+
+ from __future__ import print_function
+
+at the top to retain compatibility with Python 2.6+.
+"""
+
+from libfuturize.fixes.fix_print import FixPrint
+from libfuturize.fixer_util import future_import
+
+class FixPrintWithImport(FixPrint):
+ run_order = 7
+ def transform(self, node, results):
+ # Add the __future__ import first. (Otherwise any shebang or encoding
+ # comment line attached as a prefix to the print statement will be
+ # copied twice and appear twice.)
+ future_import(u'print_function', node)
+ n_stmt = super(FixPrintWithImport, self).transform(node, results)
+ return n_stmt
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_raise.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_raise.py
new file mode 100644
index 000000000..3e8323de2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_raise.py
@@ -0,0 +1,73 @@
+"""Fixer for 'raise E, V'
+
+From Armin Ronacher's ``python-modernize``.
+
+raise -> raise
+raise E -> raise E
+raise E, V -> raise E(V)
+
+raise (((E, E'), E''), E'''), V -> raise E(V)
+
+
+CAVEATS:
+1) "raise E, V" will be incorrectly translated if V is an exception
+ instance. The correct Python 3 idiom is
+
+ raise E from V
+
+ but since we can't detect instance-hood by syntax alone and since
+ any client code would have to be changed as well, we don't automate
+ this.
+"""
+# Author: Collin Winter, Armin Ronacher
+
+# Local imports
+from lib2to3 import pytree, fixer_base
+from lib2to3.pgen2 import token
+from lib2to3.fixer_util import Name, Call, is_tuple
+
+class FixRaise(fixer_base.BaseFix):
+
+ BM_compatible = True
+ PATTERN = """
+ raise_stmt< 'raise' exc=any [',' val=any] >
+ """
+
+ def transform(self, node, results):
+ syms = self.syms
+
+ exc = results["exc"].clone()
+ if exc.type == token.STRING:
+ msg = "Python 3 does not support string exceptions"
+ self.cannot_convert(node, msg)
+ return
+
+ # Python 2 supports
+ # raise ((((E1, E2), E3), E4), E5), V
+ # as a synonym for
+ # raise E1, V
+ # Since Python 3 will not support this, we recurse down any tuple
+ # literals, always taking the first element.
+ if is_tuple(exc):
+ while is_tuple(exc):
+ # exc.children[1:-1] is the unparenthesized tuple
+ # exc.children[1].children[0] is the first element of the tuple
+ exc = exc.children[1].children[0].clone()
+ exc.prefix = u" "
+
+ if "val" not in results:
+ # One-argument raise
+ new = pytree.Node(syms.raise_stmt, [Name(u"raise"), exc])
+ new.prefix = node.prefix
+ return new
+
+ val = results["val"].clone()
+ if is_tuple(val):
+ args = [c.clone() for c in val.children[1:-1]]
+ else:
+ val.prefix = u""
+ args = [val]
+
+ return pytree.Node(syms.raise_stmt,
+ [Name(u"raise"), Call(exc, args)],
+ prefix=node.prefix)
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_remove_old__future__imports.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_remove_old__future__imports.py
new file mode 100644
index 000000000..9336f75f3
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_remove_old__future__imports.py
@@ -0,0 +1,26 @@
+"""
+Fixer for removing any of these lines:
+
+ from __future__ import with_statement
+ from __future__ import nested_scopes
+ from __future__ import generators
+
+The reason is that __future__ imports like these are required to be the first
+line of code (after docstrings) on Python 2.6+, which can get in the way.
+
+These imports are always enabled in Python 2.6+, which is the minimum sane
+version to target for Py2/3 compatibility.
+"""
+
+from lib2to3 import fixer_base
+from libfuturize.fixer_util import remove_future_import
+
+class FixRemoveOldFutureImports(fixer_base.BaseFix):
+ BM_compatible = True
+ PATTERN = "file_input"
+ run_order = 1
+
+ def transform(self, node, results):
+ remove_future_import(u"with_statement", node)
+ remove_future_import(u"nested_scopes", node)
+ remove_future_import(u"generators", node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_unicode_keep_u.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_unicode_keep_u.py
new file mode 100644
index 000000000..2e9a4e476
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_unicode_keep_u.py
@@ -0,0 +1,24 @@
+"""Fixer that changes unicode to str and unichr to chr, but -- unlike the
+lib2to3 fix_unicode.py fixer, does not change u"..." into "...".
+
+The reason is that Py3.3+ supports the u"..." string prefix, and, if
+present, the prefix may provide useful information for disambiguating
+between byte strings and unicode strings, which is often the hardest part
+of the porting task.
+
+"""
+
+from lib2to3.pgen2 import token
+from lib2to3 import fixer_base
+
+_mapping = {u"unichr" : u"chr", u"unicode" : u"str"}
+
+class FixUnicodeKeepU(fixer_base.BaseFix):
+ BM_compatible = True
+ PATTERN = "'unicode' | 'unichr'"
+
+ def transform(self, node, results):
+ if node.type == token.NAME:
+ new = node.clone()
+ new.value = _mapping[node.value]
+ return new
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_unicode_literals_import.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_unicode_literals_import.py
new file mode 100644
index 000000000..51c50620b
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_unicode_literals_import.py
@@ -0,0 +1,18 @@
+"""
+Adds this import:
+
+ from __future__ import unicode_literals
+
+"""
+
+from lib2to3 import fixer_base
+from libfuturize.fixer_util import future_import
+
+class FixUnicodeLiteralsImport(fixer_base.BaseFix):
+ BM_compatible = True
+ PATTERN = "file_input"
+
+ run_order = 9
+
+ def transform(self, node, results):
+ future_import(u"unicode_literals", node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_xrange_with_import.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_xrange_with_import.py
new file mode 100644
index 000000000..c910f8165
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_xrange_with_import.py
@@ -0,0 +1,20 @@
+"""
+For the ``future`` package.
+
+Turns any xrange calls into range calls and adds this import line:
+
+ from builtins import range
+
+at the top.
+"""
+
+from lib2to3.fixes.fix_xrange import FixXrange
+
+from libfuturize.fixer_util import touch_import_top
+
+
+class FixXrangeWithImport(FixXrange):
+ def transform(self, node, results):
+ result = super(FixXrangeWithImport, self).transform(node, results)
+ touch_import_top('builtins', 'range', node)
+ return result
diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/main.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/main.py
new file mode 100644
index 000000000..634c2f25e
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/main.py
@@ -0,0 +1,322 @@
+"""
+futurize: automatic conversion to clean 2/3 code using ``python-future``
+======================================================================
+
+Like Armin Ronacher's modernize.py, ``futurize`` attempts to produce clean
+standard Python 3 code that runs on both Py2 and Py3.
+
+One pass
+--------
+
+Use it like this on Python 2 code:
+
+ $ futurize --verbose mypython2script.py
+
+This will attempt to port the code to standard Py3 code that also
+provides Py2 compatibility with the help of the right imports from
+``future``.
+
+To write changes to the files, use the -w flag.
+
+Two stages
+----------
+
+The ``futurize`` script can also be called in two separate stages. First:
+
+ $ futurize --stage1 mypython2script.py
+
+This produces more modern Python 2 code that is not yet compatible with Python
+3. The tests should still run and the diff should be uncontroversial to apply to
+most Python projects that are willing to drop support for Python 2.5 and lower.
+
+After this, the recommended approach is to explicitly mark all strings that must
+be byte-strings with a b'' prefix and all text (unicode) strings with a u''
+prefix, and then invoke the second stage of Python 2 to 2/3 conversion with::
+
+ $ futurize --stage2 mypython2script.py
+
+Stage 2 adds a dependency on ``future``. It converts most remaining Python
+2-specific code to Python 3 code and adds appropriate imports from ``future``
+to restore Py2 support.
+
+The command above leaves all unadorned string literals as native strings
+(byte-strings on Py2, unicode strings on Py3). If instead you would like all
+unadorned string literals to be promoted to unicode, you can also pass this
+flag:
+
+ $ futurize --stage2 --unicode-literals mypython2script.py
+
+This adds the declaration ``from __future__ import unicode_literals`` to the
+top of each file, which implicitly declares all unadorned string literals to be
+unicode strings (``unicode`` on Py2).
+
+All imports
+-----------
+
+The --all-imports option forces adding all ``__future__`` imports,
+``builtins`` imports, and standard library aliases, even if they don't
+seem necessary for the current state of each module. (This can simplify
+testing, and can reduce the need to think about Py2 compatibility when editing
+the code further.)
+
+"""
+
+from __future__ import (absolute_import, print_function, unicode_literals)
+import future.utils
+from future import __version__
+
+import sys
+import logging
+import optparse
+import os
+
+from lib2to3.main import warn, StdoutRefactoringTool
+from lib2to3 import refactor
+
+from libfuturize.fixes import (lib2to3_fix_names_stage1,
+ lib2to3_fix_names_stage2,
+ libfuturize_fix_names_stage1,
+ libfuturize_fix_names_stage2)
+
+fixer_pkg = 'libfuturize.fixes'
+
+
+def main(args=None):
+    """Main program.
+
+    Args:
+        args: optional; a list of command line arguments. If omitted,
+            sys.argv[1:] is used. Fixers are taken from the module-level
+            ``fixer_pkg`` constant, not from a parameter.
+
+    Returns a suggested exit status (0, 1, 2).
+    """
+
+    # Set up option parser
+    parser = optparse.OptionParser(usage="futurize [options] file|dir ...")
+    parser.add_option("-V", "--version", action="store_true",
+                      help="Report the version number of futurize")
+    parser.add_option("-a", "--all-imports", action="store_true",
+                      help="Add all __future__ and future imports to each module")
+    parser.add_option("-1", "--stage1", action="store_true",
+                      help="Modernize Python 2 code only; no compatibility with Python 3 (or dependency on ``future``)")
+    parser.add_option("-2", "--stage2", action="store_true",
+                      help="Take modernized (stage1) code and add a dependency on ``future`` to provide Py3 compatibility.")
+    parser.add_option("-0", "--both-stages", action="store_true",
+                      help="Apply both stages 1 and 2")
+    parser.add_option("-u", "--unicode-literals", action="store_true",
+                      help="Add ``from __future__ import unicode_literals`` to implicitly convert all unadorned string literals '' into unicode strings")
+    parser.add_option("-f", "--fix", action="append", default=[],
+                      help="Each FIX specifies a transformation; default: all.\nEither use '-f division -f metaclass' etc. or use the fully-qualified module name: '-f lib2to3.fixes.fix_types -f libfuturize.fixes.fix_unicode_keep_u'")
+    parser.add_option("-j", "--processes", action="store", default=1,
+                      type="int", help="Run 2to3 concurrently")
+    parser.add_option("-x", "--nofix", action="append", default=[],
+                      help="Prevent a fixer from being run.")
+    parser.add_option("-l", "--list-fixes", action="store_true",
+                      help="List available transformations")
+    parser.add_option("-p", "--print-function", action="store_true",
+                      help="Modify the grammar so that print() is a function")
+    parser.add_option("-v", "--verbose", action="store_true",
+                      help="More verbose logging")
+    parser.add_option("--no-diffs", action="store_true",
+                      help="Don't show diffs of the refactoring")
+    parser.add_option("-w", "--write", action="store_true",
+                      help="Write back modified files")
+    parser.add_option("-n", "--nobackups", action="store_true", default=False,
+                      help="Don't write backups for modified files.")
+    parser.add_option("-o", "--output-dir", action="store", type="str",
+                      default="", help="Put output files in this directory "
+                      "instead of overwriting the input files. Requires -n. "
+                      "For Python >= 2.7 only.")
+    parser.add_option("-W", "--write-unchanged-files", action="store_true",
+                      help="Also write files even if no changes were required"
+                      " (useful with --output-dir); implies -w.")
+    parser.add_option("--add-suffix", action="store", type="str", default="",
+                      help="Append this string to all output filenames."
+                      " Requires -n if non-empty. For Python >= 2.7 only."
+                      "ex: --add-suffix='3' will generate .py3 files.")
+
+    # Parse command line arguments
+    flags = {}
+    refactor_stdin = False
+    options, args = parser.parse_args(args)
+
+    if options.write_unchanged_files:
+        flags["write_unchanged_files"] = True
+        if not options.write:
+            warn("--write-unchanged-files/-W implies -w.")
+        options.write = True
+    # If we allowed these, the original files would be renamed to backup names
+    # but not replaced.
+    if options.output_dir and not options.nobackups:
+        parser.error("Can't use --output-dir/-o without -n.")
+    if options.add_suffix and not options.nobackups:
+        parser.error("Can't use --add-suffix without -n.")
+
+    if not options.write and options.no_diffs:
+        warn("not writing files and not printing diffs; that's not very useful")
+    if not options.write and options.nobackups:
+        parser.error("Can't use -n without -w")
+    if "-" in args:
+        refactor_stdin = True
+        if options.write:
+            print("Can't write to stdin.", file=sys.stderr)
+            return 2
+    # Is this ever necessary?
+    if options.print_function:
+        flags["print_function"] = True
+
+    # Set up logging handler
+    level = logging.DEBUG if options.verbose else logging.INFO
+    logging.basicConfig(format='%(name)s: %(message)s', level=level)
+    logger = logging.getLogger('libfuturize.main')
+
+    if options.stage1 or options.stage2:
+        assert options.both_stages is None  # -1/-2 must not be combined with -0/--both-stages
+        options.both_stages = False
+    else:
+        options.both_stages = True
+
+    avail_fixes = set()
+
+    if options.stage1 or options.both_stages:
+        avail_fixes.update(lib2to3_fix_names_stage1)
+        avail_fixes.update(libfuturize_fix_names_stage1)
+    if options.stage2 or options.both_stages:
+        avail_fixes.update(lib2to3_fix_names_stage2)
+        avail_fixes.update(libfuturize_fix_names_stage2)
+
+    if options.unicode_literals:
+        avail_fixes.add('libfuturize.fixes.fix_unicode_literals_import')
+
+    if options.version:
+        print(__version__)
+        return 0
+    if options.list_fixes:
+        print("Available transformations for the -f/--fix option:")
+        # for fixname in sorted(refactor.get_all_fix_names(fixer_pkg)):
+        for fixname in sorted(avail_fixes):
+            print(fixname)
+        if not args:
+            return 0  # listing fixes with no file args is a complete, valid run
+    if not args:
+        print("At least one file or directory argument required.",
+              file=sys.stderr)
+        print("Use --help to show usage.", file=sys.stderr)
+        return 2
+
+    unwanted_fixes = set()
+    for fix in options.nofix:
+        if ".fix_" in fix:
+            unwanted_fixes.add(fix)
+        else:
+            # Infer the full module name for the fixer.
+            # First ensure that no names clash (e.g.
+            # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah):
+            found = [f for f in avail_fixes
+                     if f.endswith('fix_{0}'.format(fix))]
+            if len(found) > 1:
+                print("Ambiguous fixer name. Choose a fully qualified "
+                      "module name instead from these:\n" +
+                      "\n".join(" " + myf for myf in found),
+                      file=sys.stderr)
+                return 2
+            elif len(found) == 0:
+                print("Unknown fixer. Use --list-fixes or -l for a list.",
+                      file=sys.stderr)
+                return 2
+            unwanted_fixes.add(found[0])
+
+    extra_fixes = set()
+    if options.all_imports:
+        if options.stage1:
+            prefix = 'libfuturize.fixes.'
+            extra_fixes.add(prefix +
+                            'fix_add__future__imports_except_unicode_literals')
+        else:
+            # In case the user hasn't run stage1 for some reason:
+            prefix = 'libpasteurize.fixes.'
+            extra_fixes.add(prefix + 'fix_add_all__future__imports')
+            extra_fixes.add(prefix + 'fix_add_future_standard_library_import')
+            extra_fixes.add(prefix + 'fix_add_all_future_builtins')
+    explicit = set()
+    if options.fix:
+        all_present = False
+        for fix in options.fix:
+            if fix == 'all':
+                all_present = True
+            else:
+                if ".fix_" in fix:
+                    explicit.add(fix)
+                else:
+                    # Infer the full module name for the fixer.
+                    # First ensure that no names clash (e.g.
+                    # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah):
+                    found = [f for f in avail_fixes
+                             if f.endswith('fix_{0}'.format(fix))]
+                    if len(found) > 1:
+                        print("Ambiguous fixer name. Choose a fully qualified "
+                              "module name instead from these:\n" +
+                              "\n".join(" " + myf for myf in found),
+                              file=sys.stderr)
+                        return 2
+                    elif len(found) == 0:
+                        print("Unknown fixer. Use --list-fixes or -l for a list.",
+                              file=sys.stderr)
+                        return 2
+                    explicit.add(found[0])
+        if len(explicit & unwanted_fixes) > 0:
+            print("Conflicting usage: the following fixers have been "
+                  "simultaneously requested and disallowed:\n" +
+                  "\n".join(" " + myf for myf in (explicit & unwanted_fixes)),
+                  file=sys.stderr)
+            return 2
+        requested = avail_fixes.union(explicit) if all_present else explicit
+    else:
+        requested = avail_fixes.union(explicit)
+    fixer_names = (requested | extra_fixes) - unwanted_fixes
+
+    input_base_dir = os.path.commonprefix(args)
+    if (input_base_dir and not input_base_dir.endswith(os.sep)
+        and not os.path.isdir(input_base_dir)):
+        # One or more similar names were passed, their directory is the base.
+        # os.path.commonprefix() is ignorant of path elements, this corrects
+        # for that weird API.
+        input_base_dir = os.path.dirname(input_base_dir)
+    if options.output_dir:
+        input_base_dir = input_base_dir.rstrip(os.sep)
+        logger.info('Output in %r will mirror the input directory %r layout.',
+                    options.output_dir, input_base_dir)
+
+    # Initialize the refactoring tool
+    if future.utils.PY26:
+        extra_kwargs = {}  # lib2to3 on Py2.6 lacks the output-dir/suffix kwargs
+    else:
+        extra_kwargs = {
+            'append_suffix': options.add_suffix,
+            'output_dir': options.output_dir,
+            'input_base_dir': input_base_dir,
+        }
+
+    rt = StdoutRefactoringTool(
+        sorted(fixer_names), flags, sorted(explicit),
+        options.nobackups, not options.no_diffs,
+        **extra_kwargs)
+
+    # Refactor all files and directories passed as arguments
+    if not rt.errors:
+        if refactor_stdin:
+            rt.refactor_stdin()
+        else:
+            try:
+                rt.refactor(args, options.write, None,
+                            options.processes)
+            except refactor.MultiprocessingUnsupported:
+                assert options.processes > 1
+                print("Sorry, -j isn't " \
+                      "supported on this platform.", file=sys.stderr)
+                return 1
+        rt.summarize()
+
+    # Return error status (0 if rt.errors is zero)
+    return int(bool(rt.errors))
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/__init__.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/__init__.py
new file mode 100644
index 000000000..4cb1cbcd6
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/__init__.py
@@ -0,0 +1 @@
+# empty to make this a package
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/__init__.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/__init__.py
new file mode 100644
index 000000000..905aec47e
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/__init__.py
@@ -0,0 +1,54 @@
+import sys  # NOTE(review): unused in this module as shown -- possibly kept for re-export; confirm
+from lib2to3 import refactor  # NOTE(review): also unused here as shown -- confirm before removing
+
+# The original set of these fixes comes from lib3to2 (https://bitbucket.org/amentajo/lib3to2):
+fix_names = set([  # full module paths of the pasteurize fixers to run
+    'libpasteurize.fixes.fix_add_all__future__imports', # from __future__ import absolute_import etc. on separate lines
+    'libpasteurize.fixes.fix_add_future_standard_library_import', # we force adding this import for now, even if it doesn't seem necessary to the fix_future_standard_library fixer, for ease of testing
+    # 'libfuturize.fixes.fix_order___future__imports', # consolidates to a single line to simplify testing -- UNFINISHED
+    'libpasteurize.fixes.fix_future_builtins', # adds "from future.builtins import *"
+    'libfuturize.fixes.fix_future_standard_library', # adds "from future import standard_library"
+
+    'libpasteurize.fixes.fix_annotations',
+    # 'libpasteurize.fixes.fix_bitlength', # ints have this in Py2.7
+    # 'libpasteurize.fixes.fix_bool', # need a decorator or Mixin
+    # 'libpasteurize.fixes.fix_bytes', # leave bytes as bytes
+    # 'libpasteurize.fixes.fix_classdecorator', # available in
+    # Py2.6+
+    # 'libpasteurize.fixes.fix_collections', hmmm ...
+    # 'libpasteurize.fixes.fix_dctsetcomp', # avail in Py27
+    'libpasteurize.fixes.fix_division', # yes
+    # 'libpasteurize.fixes.fix_except', # avail in Py2.6+
+    # 'libpasteurize.fixes.fix_features', # ?
+    'libpasteurize.fixes.fix_fullargspec',
+    # 'libpasteurize.fixes.fix_funcattrs',
+    'libpasteurize.fixes.fix_getcwd',
+    'libpasteurize.fixes.fix_imports', # adds "from future import standard_library"
+    'libpasteurize.fixes.fix_imports2',
+    # 'libpasteurize.fixes.fix_input',
+    # 'libpasteurize.fixes.fix_int',
+    # 'libpasteurize.fixes.fix_intern',
+    # 'libpasteurize.fixes.fix_itertools',
+    'libpasteurize.fixes.fix_kwargs', # yes, we want this
+    # 'libpasteurize.fixes.fix_memoryview',
+    # 'libpasteurize.fixes.fix_metaclass', # write a custom handler for
+    # this
+    # 'libpasteurize.fixes.fix_methodattrs', # __func__ and __self__ seem to be defined on Py2.7 already
+    'libpasteurize.fixes.fix_newstyle', # yes, we want this: explicit inheritance from object. Without new-style classes in Py2, super() will break etc.
+    # 'libpasteurize.fixes.fix_next', # use a decorator for this
+    # 'libpasteurize.fixes.fix_numliterals', # prob not
+    # 'libpasteurize.fixes.fix_open', # huh?
+    # 'libpasteurize.fixes.fix_print', # no way
+    'libpasteurize.fixes.fix_printfunction', # adds __future__ import print_function
+    # 'libpasteurize.fixes.fix_raise_', # TODO: get this working!
+
+    # 'libpasteurize.fixes.fix_range', # nope
+    # 'libpasteurize.fixes.fix_reduce',
+    # 'libpasteurize.fixes.fix_setliteral',
+    # 'libpasteurize.fixes.fix_str',
+    # 'libpasteurize.fixes.fix_super', # maybe, if our magic super() isn't robust enough
+    'libpasteurize.fixes.fix_throw', # yes, if Py3 supports it
+    # 'libpasteurize.fixes.fix_unittest',
+    'libpasteurize.fixes.fix_unpacking', # yes, this is useful
+    # 'libpasteurize.fixes.fix_with' # way out of date
+    ])
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/feature_base.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/feature_base.py
new file mode 100644
index 000000000..c36d9a951
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/feature_base.py
@@ -0,0 +1,57 @@
+u"""
+Base classes for features that are backwards-incompatible.
+
+Usage:
+features = Features()
+features.add(Feature("py3k_feature", "power< 'py3k' any* >", "2.7"))
+PATTERN = features.PATTERN
+"""
+
+pattern_unformatted = u"%s=%s" # name=pattern, for dict lookups
+message_unformatted = u"""
+%s is only supported in Python %s and above."""
+
+class Feature(object):
+ u"""
+ A feature has a name, a pattern, and a minimum version of Python 2.x
+ required to use the feature (or 3.x if there is no backwards-compatible
+ version of 2.x)
+ """
+ def __init__(self, name, PATTERN, version):
+ self.name = name
+ self._pattern = PATTERN
+ self.version = version
+
+ def message_text(self):
+ u"""
+ Format the above text with the name and minimum version required.
+ """
+ return message_unformatted % (self.name, self.version)
+
+class Features(set):
+ u"""
+ A set of features that generates a pattern for the features it contains.
+ This set will act like a mapping in that we map names to patterns.
+ """
+ mapping = {}
+
+ def update_mapping(self):
+ u"""
+ Called every time we care about the mapping of names to features.
+ """
+ self.mapping = dict([(f.name, f) for f in iter(self)])
+
+ @property
+ def PATTERN(self):
+ u"""
+ Uses the mapping of names to features to return a PATTERN suitable
+ for using the lib2to3 patcomp.
+ """
+ self.update_mapping()
+ return u" |\n".join([pattern_unformatted % (f.name, f._pattern) for f in iter(self)])
+
+ def __getitem__(self, key):
+ u"""
+ Implement a simple mapping to get patterns from names.
+ """
+ return self.mapping[key]
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_all__future__imports.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_all__future__imports.py
new file mode 100644
index 000000000..378979461
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_all__future__imports.py
@@ -0,0 +1,24 @@
+"""
+Fixer for adding:
+
+ from __future__ import absolute_import
+ from __future__ import division
+ from __future__ import print_function
+ from __future__ import unicode_literals
+
+This is done when converting from Py3 to both Py3/Py2.
+"""
+
+from lib2to3 import fixer_base
+from libfuturize.fixer_util import future_import
+
+class FixAddAllFutureImports(fixer_base.BaseFix):
+ BM_compatible = True
+ PATTERN = "file_input"
+ run_order = 1
+
+ def transform(self, node, results):
+ future_import(u"unicode_literals", node)
+ future_import(u"print_function", node)
+ future_import(u"division", node)
+ future_import(u"absolute_import", node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_all_future_builtins.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_all_future_builtins.py
new file mode 100644
index 000000000..22911bada
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_all_future_builtins.py
@@ -0,0 +1,37 @@
+"""
+For the ``future`` package.
+
+Adds this import line::
+
+ from builtins import (ascii, bytes, chr, dict, filter, hex, input,
+ int, list, map, next, object, oct, open, pow,
+ range, round, str, super, zip)
+
+to a module, irrespective of whether each definition is used.
+
+Adds these imports after any other imports (in an initial block of them).
+"""
+
+from __future__ import unicode_literals
+
+from lib2to3 import fixer_base
+
+from libfuturize.fixer_util import touch_import_top
+
+
+class FixAddAllFutureBuiltins(fixer_base.BaseFix):
+ BM_compatible = True
+ PATTERN = "file_input"
+ run_order = 1
+
+ def transform(self, node, results):
+ # import_str = """(ascii, bytes, chr, dict, filter, hex, input,
+ # int, list, map, next, object, oct, open, pow,
+ # range, round, str, super, zip)"""
+ touch_import_top(u'builtins', '*', node)
+
+ # builtins = """ascii bytes chr dict filter hex input
+ # int list map next object oct open pow
+ # range round str super zip"""
+ # for builtin in sorted(builtins.split(), reverse=True):
+ # touch_import_top(u'builtins', builtin, node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_future_standard_library_import.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_future_standard_library_import.py
new file mode 100644
index 000000000..0778406a8
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_future_standard_library_import.py
@@ -0,0 +1,23 @@
+"""
+For the ``future`` package.
+
+Adds this import line:
+
+ from future import standard_library
+
+after any __future__ imports but before any other imports. Doesn't actually
+change the imports to Py3 style.
+"""
+
+from lib2to3 import fixer_base
+from libfuturize.fixer_util import touch_import_top
+
+class FixAddFutureStandardLibraryImport(fixer_base.BaseFix):
+ BM_compatible = True
+ PATTERN = "file_input"
+ run_order = 8
+
+ def transform(self, node, results):
+ # TODO: add a blank line between any __future__ imports and this?
+ touch_import_top(u'future', u'standard_library', node)
+ # TODO: also add standard_library.install_hooks()
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_annotations.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_annotations.py
new file mode 100644
index 000000000..884b67411
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_annotations.py
@@ -0,0 +1,48 @@
+u"""
+Fixer to remove function annotations
+"""
+
+from lib2to3 import fixer_base
+from lib2to3.pgen2 import token
+from lib2to3.fixer_util import syms
+
+warning_text = u"Removing function annotations completely."
+
+def param_without_annotations(node):
+ return node.children[0]
+
+class FixAnnotations(fixer_base.BaseFix):
+
+ warned = False
+
+ def warn_once(self, node, reason):
+ if not self.warned:
+ self.warned = True
+ self.warning(node, reason=reason)
+
+ PATTERN = u"""
+ funcdef< 'def' any parameters< '(' [params=any] ')' > ['->' ret=any] ':' any* >
+ """
+
+ def transform(self, node, results):
+ u"""
+ This just strips annotations from the funcdef completely.
+ """
+ params = results.get(u"params")
+ ret = results.get(u"ret")
+ if ret is not None:
+ assert ret.prev_sibling.type == token.RARROW, u"Invalid return annotation"
+ self.warn_once(node, reason=warning_text)
+ ret.prev_sibling.remove()
+ ret.remove()
+ if params is None: return
+ if params.type == syms.typedargslist:
+ # more than one param in a typedargslist
+ for param in params.children:
+ if param.type == syms.tname:
+ self.warn_once(node, reason=warning_text)
+ param.replace(param_without_annotations(param))
+ elif params.type == syms.tname:
+ # one param
+ self.warn_once(node, reason=warning_text)
+ params.replace(param_without_annotations(params))
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_division.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_division.py
new file mode 100644
index 000000000..6a048710f
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_division.py
@@ -0,0 +1,28 @@
+u"""
+Fixer for division: from __future__ import division if needed
+"""
+
+from lib2to3 import fixer_base
+from libfuturize.fixer_util import token, future_import
+
+def match_division(node):
+ u"""
+ __future__.division redefines the meaning of a single slash for division,
+ so we match that and only that.
+ """
+ slash = token.SLASH
+ return node.type == slash and not node.next_sibling.type == slash and \
+ not node.prev_sibling.type == slash
+
+class FixDivision(fixer_base.BaseFix):
+ run_order = 4 # this seems to be ignored?
+
+ def match(self, node):
+ u"""
+ Since the tree needs to be fixed once and only once if and only if it
+ matches, then we can start discarding matches after we make the first.
+ """
+ return match_division(node)
+
+ def transform(self, node, results):
+ future_import(u"division", node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_features.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_features.py
new file mode 100644
index 000000000..52630f982
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_features.py
@@ -0,0 +1,86 @@
+u"""
+Warn about features that are not present in Python 2.5, giving a message that
+points to the earliest version of Python 2.x (or 3.x, if none) that supports it
+"""
+
+from .feature_base import Feature, Features
+from lib2to3 import fixer_base
+
+FEATURES = [
+ #(FeatureName,
+ # FeaturePattern,
+ # FeatureMinVersion,
+ #),
+ (u"memoryview",
+ u"power < 'memoryview' trailer < '(' any* ')' > any* >",
+ u"2.7",
+ ),
+ (u"numbers",
+ u"""import_from< 'from' 'numbers' 'import' any* > |
+ import_name< 'import' ('numbers' dotted_as_names< any* 'numbers' any* >) >""",
+ u"2.6",
+ ),
+ (u"abc",
+ u"""import_name< 'import' ('abc' dotted_as_names< any* 'abc' any* >) > |
+ import_from< 'from' 'abc' 'import' any* >""",
+ u"2.6",
+ ),
+ (u"io",
+ u"""import_name< 'import' ('io' dotted_as_names< any* 'io' any* >) > |
+ import_from< 'from' 'io' 'import' any* >""",
+ u"2.6",
+ ),
+ (u"bin",
+ u"power< 'bin' trailer< '(' any* ')' > any* >",
+ u"2.6",
+ ),
+ (u"formatting",
+ u"power< any trailer< '.' 'format' > trailer< '(' any* ')' > >",
+ u"2.6",
+ ),
+ (u"nonlocal",
+ u"global_stmt< 'nonlocal' any* >",
+ u"3.0",
+ ),
+ (u"with_traceback",
+ u"trailer< '.' 'with_traceback' >",
+ u"3.0",
+ ),
+]
+
+class FixFeatures(fixer_base.BaseFix):
+
+ run_order = 9 # Wait until all other fixers have run to check for these
+
+ # To avoid spamming, we only want to warn for each feature once.
+ features_warned = set()
+
+ # Build features from the list above
+ features = Features([Feature(name, pattern, version) for \
+ name, pattern, version in FEATURES])
+
+ PATTERN = features.PATTERN
+
+ def match(self, node):
+ to_ret = super(FixFeatures, self).match(node)
+ # We want the mapping only to tell us the node's specific information.
+ try:
+ del to_ret[u'node']
+ except Exception:
+ # We want it to delete the 'node' from the results
+ # if it's there, so we don't care if it fails for normal reasons.
+ pass
+ return to_ret
+
+ def transform(self, node, results):
+ for feature_name in results:
+ if feature_name in self.features_warned:
+ continue
+ else:
+ curr_feature = self.features[feature_name]
+ if curr_feature.version >= u"3":
+ fail = self.cannot_convert
+ else:
+ fail = self.warning
+ fail(node, reason=curr_feature.message_text())
+ self.features_warned.add(feature_name)
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_fullargspec.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_fullargspec.py
new file mode 100644
index 000000000..4bd37e151
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_fullargspec.py
@@ -0,0 +1,16 @@
+u"""
+Fixer for getfullargspec -> getargspec
+"""
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Name
+
+warn_msg = u"some of the values returned by getfullargspec are not valid in Python 2 and have no equivalent."
+
+class FixFullargspec(fixer_base.BaseFix):
+
+ PATTERN = u"'getfullargspec'"
+
+ def transform(self, node, results):
+ self.warning(node, warn_msg)
+ return Name(u"getargspec", prefix=node.prefix)
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_future_builtins.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_future_builtins.py
new file mode 100644
index 000000000..684967998
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_future_builtins.py
@@ -0,0 +1,46 @@
+"""
+Adds this import line:
+
+ from builtins import XYZ
+
+for each of the functions XYZ that is used in the module.
+"""
+
+from __future__ import unicode_literals
+
+from lib2to3 import fixer_base
+from lib2to3.pygram import python_symbols as syms
+from lib2to3.fixer_util import Name, Call, in_special_context
+
+from libfuturize.fixer_util import touch_import_top
+
+# All builtins are:
+# from future.builtins.iterators import (filter, map, zip)
+# from future.builtins.misc import (ascii, chr, hex, input, isinstance, oct, open, round, super)
+# from future.types import (bytes, dict, int, range, str)
+# We don't need isinstance any more.
+
+replaced_builtins = '''filter map zip
+ ascii chr hex input next oct open round super
+ bytes dict int range str'''.split()
+
+expression = '|'.join(["name='{0}'".format(name) for name in replaced_builtins])
+
+
+class FixFutureBuiltins(fixer_base.BaseFix):
+ BM_compatible = True
+ run_order = 9
+
+ # Currently we only match uses as a function. This doesn't match e.g.:
+ # if isinstance(s, str):
+ # ...
+ PATTERN = """
+ power<
+ ({0}) trailer< '(' args=[any] ')' >
+ rest=any* >
+ """.format(expression)
+
+ def transform(self, node, results):
+ name = results["name"]
+ touch_import_top(u'builtins', name.value, node)
+ # name.replace(Name(u"input", prefix=name.prefix))
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_getcwd.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_getcwd.py
new file mode 100644
index 000000000..9b7f002b3
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_getcwd.py
@@ -0,0 +1,26 @@
+u"""
+Fixer for os.getcwd() -> os.getcwdu().
+Also warns about "from os import getcwd", suggesting the above form.
+"""
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Name
+
+class FixGetcwd(fixer_base.BaseFix):
+
+ PATTERN = u"""
+ power< 'os' trailer< dot='.' name='getcwd' > any* >
+ |
+ import_from< 'from' 'os' 'import' bad='getcwd' >
+ """
+
+ def transform(self, node, results):
+ if u"name" in results:
+ name = results[u"name"]
+ name.replace(Name(u"getcwdu", prefix=name.prefix))
+ elif u"bad" in results:
+ # Can't convert to getcwdu and then expect to catch every use.
+ self.cannot_convert(node, u"import os, use os.getcwd() instead.")
+ return
+ else:
+ raise ValueError(u"For some reason, the pattern matcher failed.")
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_imports.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_imports.py
new file mode 100644
index 000000000..2d6718f16
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_imports.py
@@ -0,0 +1,112 @@
+u"""
+Fixer for standard library imports renamed in Python 3
+"""
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Name, is_probably_builtin, Newline, does_tree_import
+from lib2to3.pygram import python_symbols as syms
+from lib2to3.pgen2 import token
+from lib2to3.pytree import Node, Leaf
+
+from libfuturize.fixer_util import touch_import_top
+# from ..fixer_util import NameImport
+
+# used in simple_mapping_to_pattern()
+MAPPING = {u"reprlib": u"repr",
+ u"winreg": u"_winreg",
+ u"configparser": u"ConfigParser",
+ u"copyreg": u"copy_reg",
+ u"queue": u"Queue",
+ u"socketserver": u"SocketServer",
+ u"_markupbase": u"markupbase",
+ u"test.support": u"test.test_support",
+ u"dbm.bsd": u"dbhash",
+ u"dbm.ndbm": u"dbm",
+ u"dbm.dumb": u"dumbdbm",
+ u"dbm.gnu": u"gdbm",
+ u"html.parser": u"HTMLParser",
+ u"html.entities": u"htmlentitydefs",
+ u"http.client": u"httplib",
+ u"http.cookies": u"Cookie",
+ u"http.cookiejar": u"cookielib",
+# "tkinter": "Tkinter",
+ u"tkinter.dialog": u"Dialog",
+ u"tkinter._fix": u"FixTk",
+ u"tkinter.scrolledtext": u"ScrolledText",
+ u"tkinter.tix": u"Tix",
+ u"tkinter.constants": u"Tkconstants",
+ u"tkinter.dnd": u"Tkdnd",
+ u"tkinter.__init__": u"Tkinter",
+ u"tkinter.colorchooser": u"tkColorChooser",
+ u"tkinter.commondialog": u"tkCommonDialog",
+ u"tkinter.font": u"tkFont",
+ u"tkinter.ttk": u"ttk",
+ u"tkinter.messagebox": u"tkMessageBox",
+ u"tkinter.turtle": u"turtle",
+ u"urllib.robotparser": u"robotparser",
+ u"xmlrpc.client": u"xmlrpclib",
+ u"builtins": u"__builtin__",
+}
+
+# generic strings to help build patterns
+# these variables mean (with http.client.HTTPConnection as an example):
+# name = http
+# attr = client
+# used = HTTPConnection
+# fmt_name is a formatted subpattern (simple_name_match or dotted_name_match)
+
+# helps match 'queue', as in 'from queue import ...'
+simple_name_match = u"name='%s'"
+# helps match 'client', to be used if client has been imported from http
+subname_match = u"attr='%s'"
+# helps match 'http.client', as in 'import urllib.request'
+dotted_name_match = u"dotted_name=dotted_name< %s '.' %s >"
+# helps match 'queue', as in 'queue.Queue(...)'
+power_onename_match = u"%s"
+# helps match 'http.client', as in 'http.client.HTTPConnection(...)'
+power_twoname_match = u"power< %s trailer< '.' %s > any* >"
+# helps match 'client.HTTPConnection', if 'client' has been imported from http
+power_subname_match = u"power< %s any* >"
+# helps match 'from http.client import HTTPConnection'
+from_import_match = u"from_import=import_from< 'from' %s 'import' imported=any >"
+# helps match 'from http import client'
+from_import_submod_match = u"from_import_submod=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* > ) >"
+# helps match 'import urllib.request'
+name_import_match = u"name_import=import_name< 'import' %s > | name_import=import_name< 'import' dotted_as_name< %s 'as' renamed=any > >"
+# helps match 'import http.client, winreg'
+multiple_name_import_match = u"name_import=import_name< 'import' dotted_as_names< names=any* > >"
+
+def all_patterns(name):
+ u"""
+ Accepts a string and returns a pattern of possible patterns involving that name
+ Called by simple_mapping_to_pattern for each name in the mapping it receives.
+ """
+
+ # i_ denotes an import-like node
+ # u_ denotes a node that appears to be a usage of the name
+ if u'.' in name:
+ name, attr = name.split(u'.', 1)
+ simple_name = simple_name_match % (name)
+ simple_attr = subname_match % (attr)
+ dotted_name = dotted_name_match % (simple_name, simple_attr)
+ i_from = from_import_match % (dotted_name)
+ i_from_submod = from_import_submod_match % (simple_name, simple_attr, simple_attr, simple_attr, simple_attr)
+ i_name = name_import_match % (dotted_name, dotted_name)
+ u_name = power_twoname_match % (simple_name, simple_attr)
+ u_subname = power_subname_match % (simple_attr)
+ return u' | \n'.join((i_name, i_from, i_from_submod, u_name, u_subname))
+ else:
+ simple_name = simple_name_match % (name)
+ i_name = name_import_match % (simple_name, simple_name)
+ i_from = from_import_match % (simple_name)
+ u_name = power_onename_match % (simple_name)
+ return u' | \n'.join((i_name, i_from, u_name))
+
+
+class FixImports(fixer_base.BaseFix):
+
+ PATTERN = u' | \n'.join([all_patterns(name) for name in MAPPING])
+ PATTERN = u' | \n'.join((PATTERN, multiple_name_import_match))
+
+ def transform(self, node, results):
+ touch_import_top(u'future', u'standard_library', node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_imports2.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_imports2.py
new file mode 100644
index 000000000..70444e9e0
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_imports2.py
@@ -0,0 +1,174 @@
+u"""
+Fixer for complicated imports
+"""
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Name, String, FromImport, Newline, Comma
+from libfuturize.fixer_util import touch_import_top
+
+
+TK_BASE_NAMES = (u'ACTIVE', u'ALL', u'ANCHOR', u'ARC',u'BASELINE', u'BEVEL', u'BOTH',
+ u'BOTTOM', u'BROWSE', u'BUTT', u'CASCADE', u'CENTER', u'CHAR',
+ u'CHECKBUTTON', u'CHORD', u'COMMAND', u'CURRENT', u'DISABLED',
+ u'DOTBOX', u'E', u'END', u'EW', u'EXCEPTION', u'EXTENDED', u'FALSE',
+ u'FIRST', u'FLAT', u'GROOVE', u'HIDDEN', u'HORIZONTAL', u'INSERT',
+ u'INSIDE', u'LAST', u'LEFT', u'MITER', u'MOVETO', u'MULTIPLE', u'N',
+ u'NE', u'NO', u'NONE', u'NORMAL', u'NS', u'NSEW', u'NUMERIC', u'NW',
+ u'OFF', u'ON', u'OUTSIDE', u'PAGES', u'PIESLICE', u'PROJECTING',
+ u'RADIOBUTTON', u'RAISED', u'READABLE', u'RIDGE', u'RIGHT',
+ u'ROUND', u'S', u'SCROLL', u'SE', u'SEL', u'SEL_FIRST', u'SEL_LAST',
+ u'SEPARATOR', u'SINGLE', u'SOLID', u'SUNKEN', u'SW', u'StringTypes',
+ u'TOP', u'TRUE', u'TclVersion', u'TkVersion', u'UNDERLINE',
+ u'UNITS', u'VERTICAL', u'W', u'WORD', u'WRITABLE', u'X', u'Y', u'YES',
+ u'wantobjects')
+
+PY2MODULES = {
+ u'urllib2' : (
+ u'AbstractBasicAuthHandler', u'AbstractDigestAuthHandler',
+ u'AbstractHTTPHandler', u'BaseHandler', u'CacheFTPHandler',
+ u'FTPHandler', u'FileHandler', u'HTTPBasicAuthHandler',
+ u'HTTPCookieProcessor', u'HTTPDefaultErrorHandler',
+ u'HTTPDigestAuthHandler', u'HTTPError', u'HTTPErrorProcessor',
+ u'HTTPHandler', u'HTTPPasswordMgr',
+ u'HTTPPasswordMgrWithDefaultRealm', u'HTTPRedirectHandler',
+ u'HTTPSHandler', u'OpenerDirector', u'ProxyBasicAuthHandler',
+ u'ProxyDigestAuthHandler', u'ProxyHandler', u'Request',
+ u'StringIO', u'URLError', u'UnknownHandler', u'addinfourl',
+ u'build_opener', u'install_opener', u'parse_http_list',
+ u'parse_keqv_list', u'randombytes', u'request_host', u'urlopen'),
+ u'urllib' : (
+ u'ContentTooShortError', u'FancyURLopener',u'URLopener',
+ u'basejoin', u'ftperrors', u'getproxies',
+ u'getproxies_environment', u'localhost', u'pathname2url',
+ u'quote', u'quote_plus', u'splitattr', u'splithost',
+ u'splitnport', u'splitpasswd', u'splitport', u'splitquery',
+ u'splittag', u'splittype', u'splituser', u'splitvalue',
+ u'thishost', u'unquote', u'unquote_plus', u'unwrap',
+ u'url2pathname', u'urlcleanup', u'urlencode', u'urlopen',
+ u'urlretrieve',),
+ u'urlparse' : (
+ u'parse_qs', u'parse_qsl', u'urldefrag', u'urljoin',
+ u'urlparse', u'urlsplit', u'urlunparse', u'urlunsplit'),
+ u'dbm' : (
+ u'ndbm', u'gnu', u'dumb'),
+ u'anydbm' : (
+ u'error', u'open'),
+ u'whichdb' : (
+ u'whichdb',),
+ u'BaseHTTPServer' : (
+ u'BaseHTTPRequestHandler', u'HTTPServer'),
+ u'CGIHTTPServer' : (
+ u'CGIHTTPRequestHandler',),
+ u'SimpleHTTPServer' : (
+ u'SimpleHTTPRequestHandler',),
+ u'FileDialog' : TK_BASE_NAMES + (
+ u'FileDialog', u'LoadFileDialog', u'SaveFileDialog',
+ u'dialogstates', u'test'),
+ u'tkFileDialog' : (
+ u'Directory', u'Open', u'SaveAs', u'_Dialog', u'askdirectory',
+ u'askopenfile', u'askopenfilename', u'askopenfilenames',
+ u'askopenfiles', u'asksaveasfile', u'asksaveasfilename'),
+ u'SimpleDialog' : TK_BASE_NAMES + (
+ u'SimpleDialog',),
+ u'tkSimpleDialog' : TK_BASE_NAMES + (
+ u'askfloat', u'askinteger', u'askstring', u'Dialog'),
+ u'SimpleXMLRPCServer' : (
+ u'CGIXMLRPCRequestHandler', u'SimpleXMLRPCDispatcher',
+ u'SimpleXMLRPCRequestHandler', u'SimpleXMLRPCServer',
+ u'list_public_methods', u'remove_duplicates',
+ u'resolve_dotted_attribute'),
+ u'DocXMLRPCServer' : (
+ u'DocCGIXMLRPCRequestHandler', u'DocXMLRPCRequestHandler',
+ u'DocXMLRPCServer', u'ServerHTMLDoc',u'XMLRPCDocGenerator'),
+ }
+
+MAPPING = { u'urllib.request' :
+ (u'urllib2', u'urllib'),
+ u'urllib.error' :
+ (u'urllib2', u'urllib'),
+ u'urllib.parse' :
+ (u'urllib2', u'urllib', u'urlparse'),
+ u'dbm.__init__' :
+ (u'anydbm', u'whichdb'),
+ u'http.server' :
+ (u'CGIHTTPServer', u'SimpleHTTPServer', u'BaseHTTPServer'),
+ u'tkinter.filedialog' :
+ (u'tkFileDialog', u'FileDialog'),
+ u'tkinter.simpledialog' :
+ (u'tkSimpleDialog', u'SimpleDialog'),
+ u'xmlrpc.server' :
+ (u'DocXMLRPCServer', u'SimpleXMLRPCServer'),
+ }
+
+# helps match 'http', as in 'from http.server import ...'
+simple_name = u"name='%s'"
+# helps match 'server', as in 'from http.server import ...'
+simple_attr = u"attr='%s'"
+# helps match 'HTTPServer', as in 'from http.server import HTTPServer'
+simple_using = u"using='%s'"
+# helps match 'urllib.request', as in 'import urllib.request'
+dotted_name = u"dotted_name=dotted_name< %s '.' %s >"
+# helps match 'http.server', as in 'http.server.HTTPServer(...)'
+power_twoname = u"pow=power< %s trailer< '.' %s > trailer< '.' using=any > any* >"
+# helps match 'dbm.whichdb', as in 'dbm.whichdb(...)'
+power_onename = u"pow=power< %s trailer< '.' using=any > any* >"
+# helps match 'from http.server import HTTPServer'
+# also helps match 'from http.server import HTTPServer, SimpleHTTPRequestHandler'
+# also helps match 'from http.server import *'
+from_import = u"from_import=import_from< 'from' %s 'import' (import_as_name< using=any 'as' renamed=any> | in_list=import_as_names< using=any* > | using='*' | using=NAME) >"
+# helps match 'import urllib.request'
+name_import = u"name_import=import_name< 'import' (%s | in_list=dotted_as_names< imp_list=any* >) >"
+
+#############
+# WON'T FIX #
+#############
+
+# helps match 'import urllib.request as name'
+name_import_rename = u"name_import_rename=dotted_as_name< %s 'as' renamed=any >"
+# helps match 'from http import server'
+from_import_rename = u"from_import_rename=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | in_list=import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* >) >"
+
+
+def all_modules_subpattern():
+ u"""
+ Builds a pattern for all toplevel names
+ (urllib, http, etc)
+ """
+ names_dot_attrs = [mod.split(u".") for mod in MAPPING]
+ ret = u"( " + u" | ".join([dotted_name % (simple_name % (mod[0]),
+ simple_attr % (mod[1])) for mod in names_dot_attrs])
+ ret += u" | "
+ ret += u" | ".join([simple_name % (mod[0]) for mod in names_dot_attrs if mod[1] == u"__init__"]) + u" )"
+ return ret
+
+
+def build_import_pattern(mapping1, mapping2):
+ u"""
+ mapping1: A dict mapping py3k modules to all possible py2k replacements
+ mapping2: A dict mapping py2k modules to the things they do
+ This builds a HUGE pattern to match all ways that things can be imported
+ """
+ # py3k: urllib.request, py2k: ('urllib2', 'urllib')
+ yield from_import % (all_modules_subpattern())
+ for py3k, py2k in mapping1.items():
+ name, attr = py3k.split(u'.')
+ s_name = simple_name % (name)
+ s_attr = simple_attr % (attr)
+ d_name = dotted_name % (s_name, s_attr)
+ yield name_import % (d_name)
+ yield power_twoname % (s_name, s_attr)
+ if attr == u'__init__':
+ yield name_import % (s_name)
+ yield power_onename % (s_name)
+ yield name_import_rename % (d_name)
+ yield from_import_rename % (s_name, s_attr, s_attr, s_attr, s_attr)
+
+
+class FixImports2(fixer_base.BaseFix):
+
+ run_order = 4
+
+ PATTERN = u" | \n".join(build_import_pattern(MAPPING, PY2MODULES))
+
+ def transform(self, node, results):
+ touch_import_top(u'future', u'standard_library', node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_kwargs.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_kwargs.py
new file mode 100644
index 000000000..290f991ee
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_kwargs.py
@@ -0,0 +1,147 @@
+u"""
+Fixer for Python 3 function parameter syntax
+This fixer is rather sensitive to incorrect py3k syntax.
+"""
+
+# Note: "relevant" parameters are parameters following the first STAR in the list.
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import token, String, Newline, Comma, Name
+from libfuturize.fixer_util import indentation, suitify, DoubleStar
+
+_assign_template = u"%(name)s = %(kwargs)s['%(name)s']; del %(kwargs)s['%(name)s']"
+_if_template = u"if '%(name)s' in %(kwargs)s: %(assign)s"
+_else_template = u"else: %(name)s = %(default)s"
+_kwargs_default_name = u"_3to2kwargs"
+
+def gen_params(raw_params):
+ u"""
+ Generator that yields tuples of (name, default_value) for each parameter in the list
+ If no default is given, then default_value is None (not Leaf(token.NAME, 'None'))
+ """
+ assert raw_params[0].type == token.STAR and len(raw_params) > 2
+ curr_idx = 2 # the first place a keyword-only parameter name can be is index 2
+ max_idx = len(raw_params)
+ while curr_idx < max_idx:
+ curr_item = raw_params[curr_idx]
+ prev_item = curr_item.prev_sibling
+ if curr_item.type != token.NAME:
+ curr_idx += 1
+ continue
+ if prev_item is not None and prev_item.type == token.DOUBLESTAR:
+ break
+ name = curr_item.value
+ nxt = curr_item.next_sibling
+ if nxt is not None and nxt.type == token.EQUAL:
+ default_value = nxt.next_sibling
+ curr_idx += 2
+ else:
+ default_value = None
+ yield (name, default_value)
+ curr_idx += 1
+
+def remove_params(raw_params, kwargs_default=_kwargs_default_name):
+ u"""
+ Removes all keyword-only args from the params list and a bare star, if any.
+ Does not add the kwargs dict if needed.
+ Returns True if more action is needed, False if not
+ (more action is needed if no kwargs dict exists)
+ """
+ assert raw_params[0].type == token.STAR
+ if raw_params[1].type == token.COMMA:
+ raw_params[0].remove()
+ raw_params[1].remove()
+ kw_params = raw_params[2:]
+ else:
+ kw_params = raw_params[3:]
+ for param in kw_params:
+ if param.type != token.DOUBLESTAR:
+ param.remove()
+ else:
+ return False
+ else:
+ return True
+
+def needs_fixing(raw_params, kwargs_default=_kwargs_default_name):
+ u"""
+ Returns string with the name of the kwargs dict if the params after the first star need fixing
+ Otherwise returns empty string
+ """
+ found_kwargs = False
+ needs_fix = False
+
+ for t in raw_params[2:]:
+ if t.type == token.COMMA:
+ # Commas are irrelevant at this stage.
+ continue
+ elif t.type == token.NAME and not found_kwargs:
+ # Keyword-only argument: definitely need to fix.
+ needs_fix = True
+ elif t.type == token.NAME and found_kwargs:
+ # Return 'foobar' of **foobar, if needed.
+ return t.value if needs_fix else u''
+ elif t.type == token.DOUBLESTAR:
+ # Found the '**' of **foobar.
+ found_kwargs = True
+ else:
+ # Never found **foobar. Return a synthetic name, if needed.
+ return kwargs_default if needs_fix else u''
+
+class FixKwargs(fixer_base.BaseFix):
+
+ run_order = 7 # Run after function annotations are removed
+
+ PATTERN = u"funcdef< 'def' NAME parameters< '(' arglist=typedargslist< params=any* > ')' > ':' suite=any >"
+
+ def transform(self, node, results):
+ params_rawlist = results[u"params"]
+ for i, item in enumerate(params_rawlist):
+ if item.type == token.STAR:
+ params_rawlist = params_rawlist[i:]
+ break
+ else:
+ return
+ # params is guaranteed to be a list starting with *.
+ # if fixing is needed, there will be at least 3 items in this list:
+ # [STAR, COMMA, NAME] is the minimum that we need to worry about.
+ new_kwargs = needs_fixing(params_rawlist)
+ # new_kwargs is the name of the kwargs dictionary.
+ if not new_kwargs:
+ return
+ suitify(node)
+
+ # At this point, params_rawlist is guaranteed to be a list
+ # beginning with a star that includes at least one keyword-only param
+ # e.g., [STAR, NAME, COMMA, NAME, COMMA, DOUBLESTAR, NAME] or
+ # [STAR, COMMA, NAME], or [STAR, COMMA, NAME, COMMA, DOUBLESTAR, NAME]
+
+ # Anatomy of a funcdef: ['def', 'name', parameters, ':', suite]
+ # Anatomy of that suite: [NEWLINE, INDENT, first_stmt, all_other_stmts]
+ # We need to insert our new stuff before the first_stmt and change the
+ # first_stmt's prefix.
+
+ suite = node.children[4]
+ first_stmt = suite.children[2]
+ ident = indentation(first_stmt)
+
+ for name, default_value in gen_params(params_rawlist):
+ if default_value is None:
+ suite.insert_child(2, Newline())
+ suite.insert_child(2, String(_assign_template %{u'name':name, u'kwargs':new_kwargs}, prefix=ident))
+ else:
+ suite.insert_child(2, Newline())
+ suite.insert_child(2, String(_else_template %{u'name':name, u'default':default_value}, prefix=ident))
+ suite.insert_child(2, Newline())
+ suite.insert_child(2, String(_if_template %{u'assign':_assign_template %{u'name':name, u'kwargs':new_kwargs}, u'name':name, u'kwargs':new_kwargs}, prefix=ident))
+ first_stmt.prefix = ident
+ suite.children[2].prefix = u""
+
+ # Now, we need to fix up the list of params.
+
+ must_add_kwargs = remove_params(params_rawlist)
+ if must_add_kwargs:
+ arglist = results[u'arglist']
+ if len(arglist.children) > 0 and arglist.children[-1].type != token.COMMA:
+ arglist.append_child(Comma())
+ arglist.append_child(DoubleStar(prefix=u" "))
+ arglist.append_child(Name(new_kwargs))
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_memoryview.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_memoryview.py
new file mode 100644
index 000000000..a20f6f3f2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_memoryview.py
@@ -0,0 +1,21 @@
+u"""
+Fixer for memoryview(s) -> buffer(s).
+Explicit because some memoryview methods are invalid on buffer objects.
+"""
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Name
+
+
+class FixMemoryview(fixer_base.BaseFix):
+
+ explicit = True # User must specify that they want this.
+
+ PATTERN = u"""
+ power< name='memoryview' trailer< '(' [any] ')' >
+ rest=any* >
+ """
+
+ def transform(self, node, results):
+ name = results[u"name"]
+ name.replace(Name(u"buffer", prefix=name.prefix))
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_metaclass.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_metaclass.py
new file mode 100644
index 000000000..52dd1d145
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_metaclass.py
@@ -0,0 +1,78 @@
+u"""
+Fixer for (metaclass=X) -> __metaclass__ = X
+Some semantics (see PEP 3115) may be altered in the translation."""
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Name, syms, Node, Leaf, Newline, find_root
+from lib2to3.pygram import token
+from libfuturize.fixer_util import indentation, suitify
+# from ..fixer_util import Name, syms, Node, Leaf, Newline, find_root, indentation, suitify
+
+def has_metaclass(parent):
+ results = None
+ for node in parent.children:
+ kids = node.children
+ if node.type == syms.argument:
+ if kids[0] == Leaf(token.NAME, u"metaclass") and \
+ kids[1] == Leaf(token.EQUAL, u"=") and \
+ kids[2]:
+ #Hack to avoid "class X(=):" with this case.
+ results = [node] + kids
+ break
+ elif node.type == syms.arglist:
+ # Argument list... loop through it looking for:
+ # Node(*, [*, Leaf(token.NAME, u"metaclass"), Leaf(token.EQUAL, u"="), Leaf(*, *)]
+ for child in node.children:
+ if results: break
+ if child.type == token.COMMA:
+ #Store the last comma, which precedes the metaclass
+ comma = child
+ elif type(child) == Node:
+ meta = equal = name = None
+ for arg in child.children:
+ if arg == Leaf(token.NAME, u"metaclass"):
+ #We have the (metaclass) part
+ meta = arg
+ elif meta and arg == Leaf(token.EQUAL, u"="):
+ #We have the (metaclass=) part
+ equal = arg
+ elif meta and equal:
+ #Here we go, we have (metaclass=X)
+ name = arg
+ results = (comma, meta, equal, name)
+ break
+ return results
+
+
+class FixMetaclass(fixer_base.BaseFix):
+
+ PATTERN = u"""
+ classdef
+ """
+
+ def transform(self, node, results):
+ meta_results = has_metaclass(node)
+ if not meta_results: return
+ for meta in meta_results:
+ meta.remove()
+ target = Leaf(token.NAME, u"__metaclass__")
+ equal = Leaf(token.EQUAL, u"=", prefix=u" ")
+ # meta is the last item in what was returned by has_metaclass(): name
+ name = meta
+ name.prefix = u" "
+ stmt_node = Node(syms.atom, [target, equal, name])
+
+ suitify(node)
+ for item in node.children:
+ if item.type == syms.suite:
+ for stmt in item.children:
+ if stmt.type == token.INDENT:
+ # Insert, in reverse order, the statement, a newline,
+ # and an indent right after the first indented line
+ loc = item.children.index(stmt) + 1
+ # Keep consistent indentation form
+ ident = Leaf(token.INDENT, stmt.value)
+ item.insert_child(loc, ident)
+ item.insert_child(loc, Newline())
+ item.insert_child(loc, stmt_node)
+ break
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_newstyle.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_newstyle.py
new file mode 100644
index 000000000..cc6b3adcb
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_newstyle.py
@@ -0,0 +1,33 @@
+u"""
+Fixer for "class Foo: ..." -> "class Foo(object): ..."
+"""
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import LParen, RParen, Name
+
+from libfuturize.fixer_util import touch_import_top
+
+
+def insert_object(node, idx):
+ node.insert_child(idx, RParen())
+ node.insert_child(idx, Name(u"object"))
+ node.insert_child(idx, LParen())
+
+class FixNewstyle(fixer_base.BaseFix):
+
+ # Match:
+ # class Blah:
+ # and:
+ # class Blah():
+
+ PATTERN = u"classdef< 'class' NAME ['(' ')'] colon=':' any >"
+
+ def transform(self, node, results):
+ colon = results[u"colon"]
+ idx = node.children.index(colon)
+ if (node.children[idx-2].value == '(' and
+ node.children[idx-1].value == ')'):
+ del node.children[idx-2:idx]
+ idx -= 2
+ insert_object(node, idx)
+ touch_import_top(u'builtins', 'object', node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_next.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_next.py
new file mode 100644
index 000000000..9ecb6c043
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_next.py
@@ -0,0 +1,43 @@
+u"""
+Fixer for:
+it.__next__() -> it.next().
+next(it) -> it.next().
+"""
+
+from lib2to3.pgen2 import token
+from lib2to3.pygram import python_symbols as syms
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Name, Call, find_binding, Attr
+
+bind_warning = u"Calls to builtin next() possibly shadowed by global binding"
+
+
+class FixNext(fixer_base.BaseFix):
+
+ PATTERN = u"""
+ power< base=any+ trailer< '.' attr='__next__' > any* >
+ |
+ power< head='next' trailer< '(' arg=any ')' > any* >
+ |
+ classdef< 'class' base=any+ ':'
+ suite< any*
+ funcdef< 'def'
+ attr='__next__'
+ parameters< '(' NAME ')' > any+ >
+ any* > >
+ """
+
+ def transform(self, node, results):
+ assert results
+
+ base = results.get(u"base")
+ attr = results.get(u"attr")
+ head = results.get(u"head")
+ arg_ = results.get(u"arg")
+ if arg_:
+ arg = arg_.clone()
+ head.replace(Attr(Name(unicode(arg),prefix=head.prefix),
+ Name(u"next")))
+ arg_.remove()
+ elif base:
+ attr.replace(Name(u"next", prefix=attr.prefix))
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_printfunction.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_printfunction.py
new file mode 100644
index 000000000..a2a6e0843
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_printfunction.py
@@ -0,0 +1,17 @@
+u"""
+Fixer for print: from __future__ import print_function.
+"""
+
+from lib2to3 import fixer_base
+from libfuturize.fixer_util import future_import
+
+class FixPrintfunction(fixer_base.BaseFix):
+
+ # explicit = True
+
+ PATTERN = u"""
+ power< 'print' trailer < '(' any* ')' > any* >
+ """
+
+ def transform(self, node, results):
+ future_import(u"print_function", node)
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_raise.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_raise.py
new file mode 100644
index 000000000..9c9c192f8
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_raise.py
@@ -0,0 +1,25 @@
+u"""Fixer for 'raise E(V).with_traceback(T)' -> 'raise E, V, T'"""
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Comma, Node, Leaf, token, syms
+
+class FixRaise(fixer_base.BaseFix):
+
+ PATTERN = u"""
+ raise_stmt< 'raise' (power< name=any [trailer< '(' val=any* ')' >]
+ [trailer< '.' 'with_traceback' > trailer< '(' trc=any ')' >] > | any) ['from' chain=any] >"""
+
+ def transform(self, node, results):
+ name, val, trc = (results.get(u"name"), results.get(u"val"), results.get(u"trc"))
+ chain = results.get(u"chain")
+ if chain is not None:
+ self.warning(node, u"explicit exception chaining is not supported in Python 2")
+ chain.prev_sibling.remove()
+ chain.remove()
+ if trc is not None:
+ val = val[0] if val else Leaf(token.NAME, u"None")
+ val.prefix = trc.prefix = u" "
+ kids = [Leaf(token.NAME, u"raise"), name.clone(), Comma(),
+ val.clone(), Comma(), trc.clone()]
+ raise_stmt = Node(syms.raise_stmt, kids)
+ node.replace(raise_stmt)
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_raise_.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_raise_.py
new file mode 100644
index 000000000..0f020c454
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_raise_.py
@@ -0,0 +1,35 @@
+u"""Fixer for
+ raise E(V).with_traceback(T)
+ to:
+ from future.utils import raise_
+ ...
+ raise_(E, V, T)
+
+TODO: FIXME!!
+
+"""
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Comma, Node, Leaf, token, syms
+
+class FixRaise(fixer_base.BaseFix):
+
+ PATTERN = u"""
+ raise_stmt< 'raise' (power< name=any [trailer< '(' val=any* ')' >]
+ [trailer< '.' 'with_traceback' > trailer< '(' trc=any ')' >] > | any) ['from' chain=any] >"""
+
+ def transform(self, node, results):
+ FIXME
+ name, val, trc = (results.get(u"name"), results.get(u"val"), results.get(u"trc"))
+ chain = results.get(u"chain")
+ if chain is not None:
+ self.warning(node, u"explicit exception chaining is not supported in Python 2")
+ chain.prev_sibling.remove()
+ chain.remove()
+ if trc is not None:
+ val = val[0] if val else Leaf(token.NAME, u"None")
+ val.prefix = trc.prefix = u" "
+ kids = [Leaf(token.NAME, u"raise"), name.clone(), Comma(),
+ val.clone(), Comma(), trc.clone()]
+ raise_stmt = Node(syms.raise_stmt, kids)
+ node.replace(raise_stmt)
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_throw.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_throw.py
new file mode 100644
index 000000000..c0feed1ea
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_throw.py
@@ -0,0 +1,23 @@
+u"""Fixer for 'g.throw(E(V).with_traceback(T))' -> 'g.throw(E, V, T)'"""
+
+from lib2to3 import fixer_base
+from lib2to3.pytree import Node, Leaf
+from lib2to3.pgen2 import token
+from lib2to3.fixer_util import Comma
+
+class FixThrow(fixer_base.BaseFix):
+
+ PATTERN = u"""
+ power< any trailer< '.' 'throw' >
+ trailer< '(' args=power< exc=any trailer< '(' val=any* ')' >
+ trailer< '.' 'with_traceback' > trailer< '(' trc=any ')' > > ')' > >
+ """
+
+ def transform(self, node, results):
+ syms = self.syms
+ exc, val, trc = (results[u"exc"], results[u"val"], results[u"trc"])
+ val = val[0] if val else Leaf(token.NAME, u"None")
+ val.prefix = trc.prefix = u" "
+ kids = [exc.clone(), Comma(), val.clone(), Comma(), trc.clone()]
+ args = results[u"args"]
+ args.children = kids
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_unpacking.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_unpacking.py
new file mode 100644
index 000000000..c2d3207a2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_unpacking.py
@@ -0,0 +1,120 @@
+u"""
+Fixer for:
+(a,)* *b (,c)* [,] = s
+for (a,)* *b (,c)* [,] in d: ...
+"""
+
+from lib2to3 import fixer_base
+from itertools import count
+from lib2to3.fixer_util import (Assign, Comma, Call, Newline, Name,
+ Number, token, syms, Node, Leaf)
+from libfuturize.fixer_util import indentation, suitify, commatize
+# from libfuturize.fixer_util import Assign, Comma, Call, Newline, Name, Number, indentation, suitify, commatize, token, syms, Node, Leaf
+
+def assignment_source(num_pre, num_post, LISTNAME, ITERNAME):
+    u"""
+    Accepts num_pre and num_post, which are counts of values
+    before and after the starg (not including the starg)
+    Returns a source fit for Assign() from fixer_util
+    """
+    children = []
+    pre = str(num_pre)  # str() works on Py2 and Py3; bare 'unicode' is a NameError on Py3
+    post = str(num_post)
+    # This code builds the assignment source from lib2to3 tree primitives.
+    # It's not very readable, but it seems like the most correct way to do it.
+    if num_pre > 0:
+        pre_part = Node(syms.power, [Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Leaf(token.COLON, u":"), Number(pre)]), Leaf(token.RSQB, u"]")])])
+        children.append(pre_part)
+        children.append(Leaf(token.PLUS, u"+", prefix=u" "))
+    main_part = Node(syms.power, [Leaf(token.LSQB, u"[", prefix=u" "), Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Number(pre) if num_pre > 0 else Leaf(1, u""), Leaf(token.COLON, u":"), Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]) if num_post > 0 else Leaf(1, u"")]), Leaf(token.RSQB, u"]"), Leaf(token.RSQB, u"]")])])
+    children.append(main_part)
+    if num_post > 0:
+        children.append(Leaf(token.PLUS, u"+", prefix=u" "))
+        post_part = Node(syms.power, [Name(LISTNAME, prefix=u" "), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]), Leaf(token.COLON, u":")]), Leaf(token.RSQB, u"]")])])
+        children.append(post_part)
+    source = Node(syms.arith_expr, children)
+    return source
+
+class FixUnpacking(fixer_base.BaseFix):
+
+ PATTERN = u"""
+ expl=expr_stmt< testlist_star_expr<
+ pre=(any ',')*
+ star_expr< '*' name=NAME >
+ post=(',' any)* [','] > '=' source=any > |
+ impl=for_stmt< 'for' lst=exprlist<
+ pre=(any ',')*
+ star_expr< '*' name=NAME >
+ post=(',' any)* [','] > 'in' it=any ':' suite=any>"""
+
+ def fix_explicit_context(self, node, results):
+ pre, name, post, source = (results.get(n) for n in (u"pre", u"name", u"post", u"source"))
+ pre = [n.clone() for n in pre if n.type == token.NAME]
+ name.prefix = u" "
+ post = [n.clone() for n in post if n.type == token.NAME]
+ target = [n.clone() for n in commatize(pre + [name.clone()] + post)]
+ # to make the special-case fix for "*z, = ..." correct with the least
+ # amount of modification, make the left-side into a guaranteed tuple
+ target.append(Comma())
+ source.prefix = u""
+ setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [source.clone()]))
+ power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME))
+ return setup_line, power_line
+
+ def fix_implicit_context(self, node, results):
+ u"""
+ Only example of the implicit context is
+ a for loop, so only fix that.
+ """
+ pre, name, post, it = (results.get(n) for n in (u"pre", u"name", u"post", u"it"))
+ pre = [n.clone() for n in pre if n.type == token.NAME]
+ name.prefix = u" "
+ post = [n.clone() for n in post if n.type == token.NAME]
+ target = [n.clone() for n in commatize(pre + [name.clone()] + post)]
+ # to make the special-case fix for "*z, = ..." correct with the least
+ # amount of modification, make the left-side into a guaranteed tuple
+ target.append(Comma())
+ source = it.clone()
+ source.prefix = u""
+ setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [Name(self.ITERNAME)]))
+ power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME))
+ return setup_line, power_line
+
+ def transform(self, node, results):
+ u"""
+ a,b,c,d,e,f,*g,h,i = range(100) changes to
+ _3to2list = list(range(100))
+ a,b,c,d,e,f,g,h,i, = _3to2list[:6] + [_3to2list[6:-2]] + _3to2list[-2:]
+
+ and
+
+ for a,b,*c,d,e in iter_of_iters: do_stuff changes to
+ for _3to2iter in iter_of_iters:
+ _3to2list = list(_3to2iter)
+ a,b,c,d,e, = _3to2list[:2] + [_3to2list[2:-2]] + _3to2list[-2:]
+ do_stuff
+ """
+ self.LISTNAME = self.new_name(u"_3to2list")
+ self.ITERNAME = self.new_name(u"_3to2iter")
+ expl, impl = results.get(u"expl"), results.get(u"impl")
+ if expl is not None:
+ setup_line, power_line = self.fix_explicit_context(node, results)
+ setup_line.prefix = expl.prefix
+ power_line.prefix = indentation(expl.parent)
+ setup_line.append_child(Newline())
+ parent = node.parent
+ i = node.remove()
+ parent.insert_child(i, power_line)
+ parent.insert_child(i, setup_line)
+ elif impl is not None:
+ setup_line, power_line = self.fix_implicit_context(node, results)
+ suitify(node)
+ suite = [k for k in node.children if k.type == syms.suite][0]
+ setup_line.prefix = u""
+ power_line.prefix = suite.children[1].value
+ suite.children[2].prefix = indentation(suite.children[2])
+ suite.insert_child(2, Newline())
+ suite.insert_child(2, power_line)
+ suite.insert_child(2, Newline())
+ suite.insert_child(2, setup_line)
+ results.get(u"lst").replace(Name(self.ITERNAME, prefix=u" "))
diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/main.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/main.py
new file mode 100644
index 000000000..4179174b5
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/main.py
@@ -0,0 +1,204 @@
+"""
+pasteurize: automatic conversion of Python 3 code to clean 2/3 code
+===================================================================
+
+``pasteurize`` attempts to convert existing Python 3 code into source-compatible
+Python 2 and 3 code.
+
+Use it like this on Python 3 code:
+
+ $ pasteurize --verbose mypython3script.py
+
+This removes any Py3-only syntax (e.g. new metaclasses) and adds these
+import lines:
+
+ from __future__ import absolute_import
+ from __future__ import division
+ from __future__ import print_function
+ from __future__ import unicode_literals
+ from future import standard_library
+ standard_library.install_hooks()
+ from builtins import *
+
+To write changes to the files, use the -w flag.
+
+It also adds any other wrappers needed for Py2/3 compatibility.
+
+Note that separate stages are not available (or needed) when converting from
+Python 3 with ``pasteurize`` as they are when converting from Python 2 with
+``futurize``.
+
+The --all-imports option forces adding all ``__future__`` imports,
+``builtins`` imports, and standard library aliases, even if they don't
+seem necessary for the current state of each module. (This can simplify
+testing, and can reduce the need to think about Py2 compatibility when editing
+the code further.)
+
+"""
+
+from __future__ import (absolute_import, print_function, unicode_literals)
+
+import sys
+import logging
+import optparse
+from lib2to3.main import main, warn, StdoutRefactoringTool
+from lib2to3 import refactor
+
+from future import __version__
+from libpasteurize.fixes import fix_names
+
+
+def main(args=None):
+    """Main program.
+
+    Returns a suggested exit status (0, 1, 2).
+    """
+    # Set up option parser
+    parser = optparse.OptionParser(usage="pasteurize [options] file|dir ...")
+    parser.add_option("-V", "--version", action="store_true",
+                      help="Report the version number of pasteurize")
+    parser.add_option("-a", "--all-imports", action="store_true",
+                      help="Adds all __future__ and future imports to each module")
+    parser.add_option("-f", "--fix", action="append", default=[],
+                      help="Each FIX specifies a transformation; default: all")
+    parser.add_option("-j", "--processes", action="store", default=1,
+                      type="int", help="Run 2to3 concurrently")
+    parser.add_option("-x", "--nofix", action="append", default=[],
+                      help="Prevent a fixer from being run.")
+    parser.add_option("-l", "--list-fixes", action="store_true",
+                      help="List available transformations")
+    # parser.add_option("-p", "--print-function", action="store_true",
+    #                   help="Modify the grammar so that print() is a function")
+    parser.add_option("-v", "--verbose", action="store_true",
+                      help="More verbose logging")
+    parser.add_option("--no-diffs", action="store_true",
+                      help="Don't show diffs of the refactoring")
+    parser.add_option("-w", "--write", action="store_true",
+                      help="Write back modified files")
+    parser.add_option("-n", "--nobackups", action="store_true", default=False,
+                      help="Don't write backups for modified files.")
+
+    # Parse command line arguments
+    refactor_stdin = False
+    flags = {}
+    options, args = parser.parse_args(args)
+    fixer_pkg = 'libpasteurize.fixes'
+    avail_fixes = fix_names
+    flags["print_function"] = True
+
+    if not options.write and options.no_diffs:
+        warn("not writing files and not printing diffs; that's not very useful")
+    if not options.write and options.nobackups:
+        parser.error("Can't use -n without -w")
+    if options.version:
+        print(__version__)
+        return 0
+    if options.list_fixes:
+        print("Available transformations for the -f/--fix option:")
+        for fixname in sorted(avail_fixes):
+            print(fixname)
+        if not args:
+            return 0
+    if not args:
+        print("At least one file or directory argument required.",
+              file=sys.stderr)
+        print("Use --help to show usage.", file=sys.stderr)
+        return 2
+    if "-" in args:
+        refactor_stdin = True
+        if options.write:
+            print("Can't write to stdin.", file=sys.stderr)
+            return 2
+
+    # Set up logging handler
+    level = logging.DEBUG if options.verbose else logging.INFO
+    logging.basicConfig(format='%(name)s: %(message)s', level=level)
+
+    unwanted_fixes = set()
+    for fix in options.nofix:
+        if ".fix_" in fix:
+            unwanted_fixes.add(fix)
+        else:
+            # Infer the full module name for the fixer.
+            # First ensure that no names clash (e.g.
+            # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah):
+            found = [f for f in avail_fixes
+                     if f.endswith('fix_{0}'.format(fix))]
+            if len(found) > 1:
+                print("Ambiguous fixer name. Choose a fully qualified "
+                      "module name instead from these:\n" +
+                      "\n".join("  " + myf for myf in found),
+                      file=sys.stderr)
+                return 2
+            elif len(found) == 0:
+                print("Unknown fixer. Use --list-fixes or -l for a list.",
+                      file=sys.stderr)
+                return 2
+            unwanted_fixes.add(found[0])
+
+    extra_fixes = set()
+    if options.all_imports:
+        prefix = 'libpasteurize.fixes.'
+        extra_fixes.add(prefix + 'fix_add_all__future__imports')
+        extra_fixes.add(prefix + 'fix_add_future_standard_library_import')
+        extra_fixes.add(prefix + 'fix_add_all_future_builtins')
+
+    explicit = set()
+    if options.fix:
+        all_present = False
+        for fix in options.fix:
+            if fix == 'all':
+                all_present = True
+            else:
+                if ".fix_" in fix:
+                    explicit.add(fix)
+                else:
+                    # Infer the full module name for the fixer.
+                    # First ensure that no names clash (e.g.
+                    # lib2to3.fixes.fix_blah and libpasteurize.fixes.fix_blah):
+                    found = [f for f in avail_fixes
+                             if f.endswith('fix_{0}'.format(fix))]
+                    if len(found) > 1:
+                        print("Ambiguous fixer name. Choose a fully qualified "
+                              "module name instead from these:\n" +
+                              "\n".join("  " + myf for myf in found),
+                              file=sys.stderr)
+                        return 2
+                    elif len(found) == 0:
+                        print("Unknown fixer. Use --list-fixes or -l for a list.",
+                              file=sys.stderr)
+                        return 2
+                    explicit.add(found[0])
+        if len(explicit & unwanted_fixes) > 0:
+            print("Conflicting usage: the following fixers have been "
+                  "simultaneously requested and disallowed:\n" +
+                  "\n".join("  " + myf for myf in (explicit & unwanted_fixes)),
+                  file=sys.stderr)
+            return 2
+        requested = avail_fixes.union(explicit) if all_present else explicit
+    else:
+        requested = avail_fixes.union(explicit)
+
+    fixer_names = (requested | extra_fixes) - unwanted_fixes  # parenthesized: '-' binds tighter than '|', so -x/--nofix fixers were never removed
+
+    # Initialize the refactoring tool
+    rt = StdoutRefactoringTool(sorted(fixer_names), flags, set(),
+                               options.nobackups, not options.no_diffs)
+
+    # Refactor all files and directories passed as arguments
+    if not rt.errors:
+        if refactor_stdin:
+            rt.refactor_stdin()
+        else:
+            try:
+                rt.refactor(args, options.write, None,
+                            options.processes)
+            except refactor.MultiprocessingUnsupported:
+                assert options.processes > 1
+                print("Sorry, -j isn't " \
+                      "supported on this platform.", file=sys.stderr)
+                return 1
+        rt.summarize()
+
+    # Return error status (0 if rt.errors is zero)
+    return int(bool(rt.errors))
diff --git a/.install/.kodi/addons/script.module.future/libs/past/__init__.py b/.install/.kodi/addons/script.module.future/libs/past/__init__.py
new file mode 100644
index 000000000..3b5d9db17
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/past/__init__.py
@@ -0,0 +1,92 @@
+# coding=utf-8
+"""
+past: compatibility with Python 2 from Python 3
+===============================================
+
+``past`` is a package to aid with Python 2/3 compatibility. Whereas ``future``
+contains backports of Python 3 constructs to Python 2, ``past`` provides
+implementations of some Python 2 constructs in Python 3 and tools to import and
+run Python 2 code in Python 3. It is intended to be used sparingly, as a way of
+running old Python 2 code from Python 3 until the code is ported properly.
+
+Potential uses for libraries:
+
+- as a step in porting a Python 2 codebase to Python 3 (e.g. with the ``futurize`` script)
+- to provide Python 3 support for previously Python 2-only libraries with the
+ same APIs as on Python 2 -- particularly with regard to 8-bit strings (the
+ ``past.builtins.str`` type).
+- to aid in providing minimal-effort Python 3 support for applications using
+ libraries that do not yet wish to upgrade their code properly to Python 3, or
+ wish to upgrade it gradually to Python 3 style.
+
+
+Here are some code examples that run identically on Python 3 and 2::
+
+ >>> from past.builtins import str as oldstr
+
+ >>> philosopher = oldstr(u'\u5b54\u5b50'.encode('utf-8'))
+ >>> # This now behaves like a Py2 byte-string on both Py2 and Py3.
+ >>> # For example, indexing returns a Python 2-like string object, not
+ >>> # an integer:
+ >>> philosopher[0]
+ '\xe5'
+ >>> type(philosopher[0])
+ <past.types.oldstr>
+
+ >>> # List-producing versions of range, reduce, map, filter
+ >>> from past.builtins import range, reduce
+ >>> range(10)
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
+ >>> reduce(lambda x, y: x+y, [1, 2, 3, 4, 5])
+ 15
+
+ >>> # Other functions removed in Python 3 are resurrected ...
+ >>> from past.builtins import execfile
+ >>> execfile('myfile.py')
+
+ >>> from past.builtins import raw_input
+ >>> name = raw_input('What is your name? ')
+ What is your name? [cursor]
+
+ >>> from past.builtins import reload
+ >>> reload(mymodule) # equivalent to imp.reload(mymodule) in Python 3
+
+ >>> from past.builtins import xrange
+ >>> for i in xrange(10):
+ ... pass
+
+
+It also provides import hooks so you can import and use Python 2 modules like
+this::
+
+ $ python3
+
+ >>> from past import autotranslate
+ >>> autotranslate('mypy2module')
+ >>> import mypy2module
+
+until the authors of the Python 2 modules have upgraded their code. Then, for
+example::
+
+ >>> mypy2module.func_taking_py2_string(oldstr(b'abcd'))
+
+
+Credits
+-------
+
+:Author: Ed Schofield
+:Sponsor: Python Charmers Pty Ltd, Australia: http://pythoncharmers.com
+
+
+Licensing
+---------
+Copyright 2013-2018 Python Charmers Pty Ltd, Australia.
+The software is distributed under an MIT licence. See LICENSE.txt.
+"""
+
+
+from past.translation import install_hooks as autotranslate
+from future import __version__, __copyright__, __license__
+
+__title__ = 'past'
+__author__ = 'Ed Schofield'
diff --git a/.install/.kodi/addons/script.module.future/libs/past/builtins/__init__.py b/.install/.kodi/addons/script.module.future/libs/past/builtins/__init__.py
new file mode 100644
index 000000000..1b19e373c
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/past/builtins/__init__.py
@@ -0,0 +1,72 @@
+"""
+A resurrection of some old functions from Python 2 for use in Python 3. These
+should be used sparingly, to help with porting efforts, since code using them
+is no longer standard Python 3 code.
+
+This module provides the following:
+
+1. Implementations of these builtin functions which have no equivalent on Py3:
+
+- apply
+- chr
+- cmp
+- execfile
+
+2. Aliases:
+
+- intern <- sys.intern
+- raw_input <- input
+- reduce <- functools.reduce
+- reload <- imp.reload
+- unichr <- chr
+- unicode <- str
+- xrange <- range
+
+3. List-producing versions of the corresponding Python 3 iterator-producing functions:
+
+- filter
+- map
+- range
+- zip
+
+4. Forward-ported Py2 types:
+
+- basestring
+- dict
+- str
+- long
+- unicode
+
+"""
+
+from future.utils import PY3
+from past.builtins.noniterators import (filter, map, range, reduce, zip)
+# from past.builtins.misc import (ascii, hex, input, oct, open)
+if PY3:
+ from past.types import (basestring,
+ olddict as dict,
+ oldstr as str,
+ long,
+ unicode)
+else:
+ from __builtin__ import (basestring, dict, str, long, unicode)
+
+from past.builtins.misc import (apply, chr, cmp, execfile, intern, oct,
+ raw_input, reload, unichr, unicode, xrange)
+from past import utils
+
+
+if utils.PY3:
+ # We only import names that shadow the builtins on Py3. No other namespace
+ # pollution on Py3.
+
+ # Only shadow builtins on Py3; no new names
+ __all__ = ['filter', 'map', 'range', 'reduce', 'zip',
+ 'basestring', 'dict', 'str', 'long', 'unicode',
+ 'apply', 'chr', 'cmp', 'execfile', 'intern', 'raw_input',
+ 'reload', 'unichr', 'xrange'
+ ]
+
+else:
+ # No namespace pollution on Py2
+ __all__ = []
diff --git a/.install/.kodi/addons/script.module.future/libs/past/builtins/misc.py b/.install/.kodi/addons/script.module.future/libs/past/builtins/misc.py
new file mode 100644
index 000000000..06fbb92d2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/past/builtins/misc.py
@@ -0,0 +1,89 @@
+from __future__ import unicode_literals
+import sys
+import inspect
+# 'Mapping' moved to collections.abc in Py3.3 and was removed from
+# 'collections' itself in Py3.10, so fall back for older interpreters.
+try:
+    from collections.abc import Mapping
+except ImportError:  # Python 2
+    from collections import Mapping
+from future.utils import PY3, exec_
+if PY3:
+ import builtins
+
+ def apply(f, *args, **kw):
+ return f(*args, **kw)
+
+ from past.builtins import str as oldstr
+
+ def chr(i):
+ """
+ Return a byte-string of one character with ordinal i; 0 <= i <= 256
+ """
+ return oldstr(bytes((i,)))
+
+ def cmp(x, y):
+ """
+ cmp(x, y) -> integer
+
+ Return negative if x<y, zero if x==y, positive if x>y.
+ """
+ return (x > y) - (x < y)
+
+ from sys import intern
+
+ def oct(number):
+ """oct(number) -> string
+
+ Return the octal representation of an integer
+ """
+ return '0' + builtins.oct(number)[2:]
+
+ raw_input = input
+ from imp import reload
+ unicode = str
+ unichr = chr
+ xrange = range
+else:
+ import __builtin__
+ apply = __builtin__.apply
+ chr = __builtin__.chr
+ cmp = __builtin__.cmp
+ execfile = __builtin__.execfile
+ intern = __builtin__.intern
+ oct = __builtin__.oct
+ raw_input = __builtin__.raw_input
+ reload = __builtin__.reload
+ unicode = __builtin__.unicode
+ unichr = __builtin__.unichr
+ xrange = __builtin__.xrange
+
+
+if PY3:
+    def execfile(filename, myglobals=None, mylocals=None):
+        """
+        Read and execute a Python script from a file in the given namespaces.
+        The globals and locals are dictionaries, defaulting to the current
+        globals and locals. If only globals is given, locals defaults to it.
+        """
+        if myglobals is None:
+            # There seems to be no alternative to frame hacking here.
+            caller_frame = inspect.stack()[1]
+            myglobals = caller_frame[0].f_globals
+            mylocals = caller_frame[0].f_locals
+        elif mylocals is None:
+            # Only if myglobals is given do we set mylocals to it.
+            mylocals = myglobals
+        if not isinstance(myglobals, Mapping):
+            raise TypeError('globals must be a mapping')
+        if not isinstance(mylocals, Mapping):
+            raise TypeError('locals must be a mapping')
+        with open(filename, "rb") as fin:  # 'U' is invalid with 'b' and was removed in Py3.11
+            source = fin.read()
+        code = compile(source, filename, "exec")
+        exec_(code, myglobals, mylocals)
+
+
+if PY3:
+ __all__ = ['apply', 'chr', 'cmp', 'execfile', 'intern', 'raw_input',
+ 'reload', 'unichr', 'unicode', 'xrange']
+else:
+ __all__ = []
diff --git a/.install/.kodi/addons/script.module.future/libs/past/builtins/noniterators.py b/.install/.kodi/addons/script.module.future/libs/past/builtins/noniterators.py
new file mode 100644
index 000000000..5826b97c1
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/past/builtins/noniterators.py
@@ -0,0 +1,272 @@
+"""
+This module is designed to be used as follows::
+
+ from past.builtins.noniterators import filter, map, range, reduce, zip
+
+And then, for example::
+
+ assert isinstance(range(5), list)
+
+The list-producing functions this brings in are::
+
+- ``filter``
+- ``map``
+- ``range``
+- ``reduce``
+- ``zip``
+
+"""
+
+from __future__ import division, absolute_import, print_function
+
+from itertools import chain, starmap
+import itertools # since zip_longest doesn't exist on Py2
+from past.types import basestring
+from past.utils import PY3
+
+
+def flatmap(f, items):
+ return chain.from_iterable(map(f, items))
+
+
+if PY3:
+ import builtins
+
+ # list-producing versions of the major Python iterating functions
+ def oldfilter(*args):
+ """
+ filter(function or None, sequence) -> list, tuple, or string
+
+ Return those items of sequence for which function(item) is true.
+ If function is None, return the items that are true. If sequence
+ is a tuple or string, return the same type, else return a list.
+ """
+ mytype = type(args[1])
+ if isinstance(args[1], basestring):
+ return mytype().join(builtins.filter(*args))
+ elif isinstance(args[1], (tuple, list)):
+ return mytype(builtins.filter(*args))
+ else:
+ # Fall back to list. Is this the right thing to do?
+ return list(builtins.filter(*args))
+
+ # This is surprisingly difficult to get right. For example, the
+ # solutions here fail with the test cases in the docstring below:
+ # http://stackoverflow.com/questions/8072755/
+ def oldmap(func, *iterables):
+ """
+ map(function, sequence[, sequence, ...]) -> list
+
+ Return a list of the results of applying the function to the
+ items of the argument sequence(s). If more than one sequence is
+ given, the function is called with an argument list consisting of
+ the corresponding item of each sequence, substituting None for
+ missing values when not all sequences have the same length. If
+ the function is None, return a list of the items of the sequence
+ (or a list of tuples if more than one sequence).
+
+ Test cases:
+ >>> oldmap(None, 'hello world')
+ ['h', 'e', 'l', 'l', 'o', ' ', 'w', 'o', 'r', 'l', 'd']
+
+ >>> oldmap(None, range(4))
+ [0, 1, 2, 3]
+
+ More test cases are in past.tests.test_builtins.
+ """
+ zipped = itertools.zip_longest(*iterables)
+ l = list(zipped)
+ if len(l) == 0:
+ return []
+ if func is None:
+ result = l
+ else:
+ result = list(starmap(func, l))
+
+ # Inspect to see whether it's a simple sequence of tuples
+ try:
+ if max([len(item) for item in result]) == 1:
+ return list(chain.from_iterable(result))
+ # return list(flatmap(func, result))
+ except TypeError as e:
+ # Simple objects like ints have no len()
+ pass
+ return result
+
+ ############################
+ ### For reference, the source code for Py2.7 map function:
+ # static PyObject *
+ # builtin_map(PyObject *self, PyObject *args)
+ # {
+ # typedef struct {
+ # PyObject *it; /* the iterator object */
+ # int saw_StopIteration; /* bool: did the iterator end? */
+ # } sequence;
+ #
+ # PyObject *func, *result;
+ # sequence *seqs = NULL, *sqp;
+ # Py_ssize_t n, len;
+ # register int i, j;
+ #
+ # n = PyTuple_Size(args);
+ # if (n < 2) {
+ # PyErr_SetString(PyExc_TypeError,
+ # "map() requires at least two args");
+ # return NULL;
+ # }
+ #
+ # func = PyTuple_GetItem(args, 0);
+ # n--;
+ #
+ # if (func == Py_None) {
+ # if (PyErr_WarnPy3k("map(None, ...) not supported in 3.x; "
+ # "use list(...)", 1) < 0)
+ # return NULL;
+ # if (n == 1) {
+ # /* map(None, S) is the same as list(S). */
+ # return PySequence_List(PyTuple_GetItem(args, 1));
+ # }
+ # }
+ #
+ # /* Get space for sequence descriptors. Must NULL out the iterator
+ # * pointers so that jumping to Fail_2 later doesn't see trash.
+ # */
+ # if ((seqs = PyMem_NEW(sequence, n)) == NULL) {
+ # PyErr_NoMemory();
+ # return NULL;
+ # }
+ # for (i = 0; i < n; ++i) {
+ # seqs[i].it = (PyObject*)NULL;
+ # seqs[i].saw_StopIteration = 0;
+ # }
+ #
+ # /* Do a first pass to obtain iterators for the arguments, and set len
+ # * to the largest of their lengths.
+ # */
+ # len = 0;
+ # for (i = 0, sqp = seqs; i < n; ++i, ++sqp) {
+ # PyObject *curseq;
+ # Py_ssize_t curlen;
+ #
+ # /* Get iterator. */
+ # curseq = PyTuple_GetItem(args, i+1);
+ # sqp->it = PyObject_GetIter(curseq);
+ # if (sqp->it == NULL) {
+ # static char errmsg[] =
+ # "argument %d to map() must support iteration";
+ # char errbuf[sizeof(errmsg) + 25];
+ # PyOS_snprintf(errbuf, sizeof(errbuf), errmsg, i+2);
+ # PyErr_SetString(PyExc_TypeError, errbuf);
+ # goto Fail_2;
+ # }
+ #
+ # /* Update len. */
+ # curlen = _PyObject_LengthHint(curseq, 8);
+ # if (curlen > len)
+ # len = curlen;
+ # }
+ #
+ # /* Get space for the result list. */
+ # if ((result = (PyObject *) PyList_New(len)) == NULL)
+ # goto Fail_2;
+ #
+ # /* Iterate over the sequences until all have stopped. */
+ # for (i = 0; ; ++i) {
+ # PyObject *alist, *item=NULL, *value;
+ # int numactive = 0;
+ #
+ # if (func == Py_None && n == 1)
+ # alist = NULL;
+ # else if ((alist = PyTuple_New(n)) == NULL)
+ # goto Fail_1;
+ #
+ # for (j = 0, sqp = seqs; j < n; ++j, ++sqp) {
+ # if (sqp->saw_StopIteration) {
+ # Py_INCREF(Py_None);
+ # item = Py_None;
+ # }
+ # else {
+ # item = PyIter_Next(sqp->it);
+ # if (item)
+ # ++numactive;
+ # else {
+ # if (PyErr_Occurred()) {
+ # Py_XDECREF(alist);
+ # goto Fail_1;
+ # }
+ # Py_INCREF(Py_None);
+ # item = Py_None;
+ # sqp->saw_StopIteration = 1;
+ # }
+ # }
+ # if (alist)
+ # PyTuple_SET_ITEM(alist, j, item);
+ # else
+ # break;
+ # }
+ #
+ # if (!alist)
+ # alist = item;
+ #
+ # if (numactive == 0) {
+ # Py_DECREF(alist);
+ # break;
+ # }
+ #
+ # if (func == Py_None)
+ # value = alist;
+ # else {
+ # value = PyEval_CallObject(func, alist);
+ # Py_DECREF(alist);
+ # if (value == NULL)
+ # goto Fail_1;
+ # }
+ # if (i >= len) {
+ # int status = PyList_Append(result, value);
+ # Py_DECREF(value);
+ # if (status < 0)
+ # goto Fail_1;
+ # }
+ # else if (PyList_SetItem(result, i, value) < 0)
+ # goto Fail_1;
+ # }
+ #
+ # if (i < len && PyList_SetSlice(result, i, len, NULL) < 0)
+ # goto Fail_1;
+ #
+ # goto Succeed;
+ #
+ # Fail_1:
+ # Py_DECREF(result);
+ # Fail_2:
+ # result = NULL;
+ # Succeed:
+ # assert(seqs);
+ # for (i = 0; i < n; ++i)
+ # Py_XDECREF(seqs[i].it);
+ # PyMem_DEL(seqs);
+ # return result;
+ # }
+
+ def oldrange(*args, **kwargs):
+ return list(builtins.range(*args, **kwargs))
+
+ def oldzip(*args, **kwargs):
+ return list(builtins.zip(*args, **kwargs))
+
+ filter = oldfilter
+ map = oldmap
+ range = oldrange
+ from functools import reduce
+ zip = oldzip
+ __all__ = ['filter', 'map', 'range', 'reduce', 'zip']
+
+else:
+ import __builtin__
+ # Python 2-builtin ranges produce lists
+ filter = __builtin__.filter
+ map = __builtin__.map
+ range = __builtin__.range
+ reduce = __builtin__.reduce
+ zip = __builtin__.zip
+ __all__ = []
diff --git a/.install/.kodi/addons/script.module.future/libs/past/tests/__init__.py b/.install/.kodi/addons/script.module.future/libs/past/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/.install/.kodi/addons/script.module.future/libs/past/translation/__init__.py b/.install/.kodi/addons/script.module.future/libs/past/translation/__init__.py
new file mode 100644
index 000000000..c7ae2b7a0
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/past/translation/__init__.py
@@ -0,0 +1,497 @@
+# -*- coding: utf-8 -*-
+"""
+past.translation
+==================
+
+The ``past.translation`` package provides an import hook for Python 3 which
+transparently runs ``futurize`` fixers over Python 2 code on import to convert
+print statements into functions, etc.
+
+It is intended to assist users in migrating to Python 3.x even if some
+dependencies still only support Python 2.x.
+
+Usage
+-----
+
+Once your Py2 package is installed in the usual module search path, the import
+hook is invoked as follows:
+
+ >>> from past import autotranslate
+ >>> autotranslate('mypackagename')
+
+Or:
+
+ >>> autotranslate(['mypackage1', 'mypackage2'])
+
+You can unregister the hook using::
+
+ >>> from past.translation import remove_hooks
+ >>> remove_hooks()
+
+Author: Ed Schofield.
+Inspired by and based on ``uprefix`` by Vinay M. Sajip.
+"""
+
+import imp
+import logging
+import marshal
+import os
+import sys
+import copy
+from lib2to3.pgen2.parse import ParseError
+from lib2to3.refactor import RefactoringTool
+
+from libfuturize import fixes
+
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
+
+myfixes = (list(fixes.libfuturize_fix_names_stage1) +
+ list(fixes.lib2to3_fix_names_stage1) +
+ list(fixes.libfuturize_fix_names_stage2) +
+ list(fixes.lib2to3_fix_names_stage2))
+
+
+# We detect whether the code is Py2 or Py3 by applying certain lib2to3 fixers
+# to it. If the diff is empty, it's Python 3 code.
+
+py2_detect_fixers = [
+# From stage 1:
+ 'lib2to3.fixes.fix_apply',
+ # 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. and move to stage2
+ 'lib2to3.fixes.fix_except',
+ 'lib2to3.fixes.fix_execfile',
+ 'lib2to3.fixes.fix_exitfunc',
+ 'lib2to3.fixes.fix_funcattrs',
+ 'lib2to3.fixes.fix_filter',
+ 'lib2to3.fixes.fix_has_key',
+ 'lib2to3.fixes.fix_idioms',
+ 'lib2to3.fixes.fix_import', # makes any implicit relative imports explicit. (Use with ``from __future__ import absolute_import``)
+ 'lib2to3.fixes.fix_intern',
+ 'lib2to3.fixes.fix_isinstance',
+ 'lib2to3.fixes.fix_methodattrs',
+ 'lib2to3.fixes.fix_ne',
+ 'lib2to3.fixes.fix_numliterals', # turns 1L into 1, 0755 into 0o755
+ 'lib2to3.fixes.fix_paren',
+ 'lib2to3.fixes.fix_print',
+ 'lib2to3.fixes.fix_raise', # uses incompatible with_traceback() method on exceptions
+ 'lib2to3.fixes.fix_renames',
+ 'lib2to3.fixes.fix_reduce',
+ # 'lib2to3.fixes.fix_set_literal', # this is unnecessary and breaks Py2.6 support
+ 'lib2to3.fixes.fix_repr',
+ 'lib2to3.fixes.fix_standarderror',
+ 'lib2to3.fixes.fix_sys_exc',
+ 'lib2to3.fixes.fix_throw',
+ 'lib2to3.fixes.fix_tuple_params',
+ 'lib2to3.fixes.fix_types',
+ 'lib2to3.fixes.fix_ws_comma',
+ 'lib2to3.fixes.fix_xreadlines',
+
+# From stage 2:
+ 'lib2to3.fixes.fix_basestring',
+ # 'lib2to3.fixes.fix_buffer', # perhaps not safe. Test this.
+ # 'lib2to3.fixes.fix_callable', # not needed in Py3.2+
+ # 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc.
+ 'lib2to3.fixes.fix_exec',
+ # 'lib2to3.fixes.fix_future', # we don't want to remove __future__ imports
+ 'lib2to3.fixes.fix_getcwdu',
+ # 'lib2to3.fixes.fix_imports', # called by libfuturize.fixes.fix_future_standard_library
+ # 'lib2to3.fixes.fix_imports2', # we don't handle this yet (dbm)
+ # 'lib2to3.fixes.fix_input',
+ # 'lib2to3.fixes.fix_itertools',
+ # 'lib2to3.fixes.fix_itertools_imports',
+ 'lib2to3.fixes.fix_long',
+ # 'lib2to3.fixes.fix_map',
+ # 'lib2to3.fixes.fix_metaclass', # causes SyntaxError in Py2! Use the one from ``six`` instead
+ 'lib2to3.fixes.fix_next',
+ 'lib2to3.fixes.fix_nonzero', # TODO: add a decorator for mapping __bool__ to __nonzero__
+ # 'lib2to3.fixes.fix_operator', # we will need support for this by e.g. extending the Py2 operator module to provide those functions in Py3
+ 'lib2to3.fixes.fix_raw_input',
+ # 'lib2to3.fixes.fix_unicode', # strips off the u'' prefix, which removes a potentially helpful source of information for disambiguating unicode/byte strings
+ # 'lib2to3.fixes.fix_urllib',
+ 'lib2to3.fixes.fix_xrange',
+ # 'lib2to3.fixes.fix_zip',
+]
+
+
+class RTs:
+ """
+ A namespace for the refactoring tools. This avoids creating these at
+ the module level, which slows down the module import. (See issue #117).
+
+ There are two possible grammars: with or without the print statement.
+ Hence we have two possible refactoring tool implementations.
+ """
+ _rt = None
+ _rtp = None
+ _rt_py2_detect = None
+ _rtp_py2_detect = None
+
+ @staticmethod
+ def setup():
+ """
+ Call this before using the refactoring tools to create them on demand
+ if needed.
+ """
+ if None in [RTs._rt, RTs._rtp]:
+ RTs._rt = RefactoringTool(myfixes)
+ RTs._rtp = RefactoringTool(myfixes, {'print_function': True})
+
+
+ @staticmethod
+ def setup_detect_python2():
+ """
+ Call this before using the refactoring tools to create them on demand
+ if needed.
+ """
+ if None in [RTs._rt_py2_detect, RTs._rtp_py2_detect]:
+ RTs._rt_py2_detect = RefactoringTool(py2_detect_fixers)
+ RTs._rtp_py2_detect = RefactoringTool(py2_detect_fixers,
+ {'print_function': True})
+
+
+# We need to find a prefix for the standard library, as we don't want to
+# process any files there (they will already be Python 3).
+#
+# The following method is used by Sanjay Vinip in uprefix. This fails for
+# ``conda`` environments:
+# # In a non-pythonv virtualenv, sys.real_prefix points to the installed Python.
+# # In a pythonv venv, sys.base_prefix points to the installed Python.
+# # Outside a virtual environment, sys.prefix points to the installed Python.
+
+# if hasattr(sys, 'real_prefix'):
+# _syslibprefix = sys.real_prefix
+# else:
+# _syslibprefix = getattr(sys, 'base_prefix', sys.prefix)
+
+# Instead, we use the portion of the path common to both the stdlib modules
+# ``math`` and ``urllib``.
+
+def splitall(path):
+ """
+ Split a path into all components. From Python Cookbook.
+ """
+ allparts = []
+ while True:
+ parts = os.path.split(path)
+ if parts[0] == path: # sentinel for absolute paths
+ allparts.insert(0, parts[0])
+ break
+ elif parts[1] == path: # sentinel for relative paths
+ allparts.insert(0, parts[1])
+ break
+ else:
+ path = parts[0]
+ allparts.insert(0, parts[1])
+ return allparts
+
+
+def common_substring(s1, s2):
+ """
+ Returns the longest common substring to the two strings, starting from the
+ left.
+ """
+ chunks = []
+ path1 = splitall(s1)
+ path2 = splitall(s2)
+ for (dir1, dir2) in zip(path1, path2):
+ if dir1 != dir2:
+ break
+ chunks.append(dir1)
+ return os.path.join(*chunks)
+
+# _stdlibprefix = common_substring(math.__file__, urllib.__file__)
+
+
+def detect_python2(source, pathname):
+ """
+ Returns a bool indicating whether we think the code is Py2
+ """
+ RTs.setup_detect_python2()
+ try:
+ tree = RTs._rt_py2_detect.refactor_string(source, pathname)
+ except ParseError as e:
+ if e.msg != 'bad input' or e.value != '=':
+ raise
+ tree = RTs._rtp.refactor_string(source, pathname)
+
+ if source != str(tree)[:-1]: # remove added newline
+ # The above fixers made changes, so we conclude it's Python 2 code
+ logger.debug('Detected Python 2 code: {0}'.format(pathname))
+ with open('/tmp/original_code.py', 'w') as f:
+ f.write('### Original code (detected as py2): %s\n%s' %
+ (pathname, source))
+ with open('/tmp/py2_detection_code.py', 'w') as f:
+ f.write('### Code after running py3 detection (from %s)\n%s' %
+ (pathname, str(tree)[:-1]))
+ return True
+ else:
+ logger.debug('Detected Python 3 code: {0}'.format(pathname))
+ with open('/tmp/original_code.py', 'w') as f:
+ f.write('### Original code (detected as py3): %s\n%s' %
+ (pathname, source))
+ try:
+ os.remove('/tmp/futurize_code.py')
+ except OSError:
+ pass
+ return False
+
+
+class Py2Fixer(object):
+ """
+ An import hook class that uses lib2to3 for source-to-source translation of
+ Py2 code to Py3.
+ """
+
+ # See the comments on :class:future.standard_library.RenameImport.
+ # We add this attribute here so remove_hooks() and install_hooks() can
+ # unambiguously detect whether the import hook is installed:
+ PY2FIXER = True
+
+ def __init__(self):
+ self.found = None
+ self.base_exclude_paths = ['future', 'past']
+ self.exclude_paths = copy.copy(self.base_exclude_paths)
+ self.include_paths = []
+
+ def include(self, paths):
+ """
+ Pass in a sequence of module names such as 'plotrique.plotting' that,
+ if present at the leftmost side of the full package name, would
+ specify the module to be transformed from Py2 to Py3.
+ """
+ self.include_paths += paths
+
+ def exclude(self, paths):
+ """
+ Pass in a sequence of strings such as 'mymodule' that, if
+ present at the leftmost side of the full package name, would cause
+ the module not to undergo any source transformation.
+ """
+ self.exclude_paths += paths
+
+ def find_module(self, fullname, path=None):
+ logger.debug('Running find_module: {0}...'.format(fullname))
+ if '.' in fullname:
+ parent, child = fullname.rsplit('.', 1)
+ if path is None:
+ loader = self.find_module(parent, path)
+ mod = loader.load_module(parent)
+ path = mod.__path__
+ fullname = child
+
+ # Perhaps we should try using the new importlib functionality in Python
+ # 3.3: something like this?
+ # thing = importlib.machinery.PathFinder.find_module(fullname, path)
+ try:
+ self.found = imp.find_module(fullname, path)
+ except Exception as e:
+ logger.debug('Py2Fixer could not find {0}')
+ logger.debug('Exception was: {0})'.format(fullname, e))
+ return None
+ self.kind = self.found[-1][-1]
+ if self.kind == imp.PKG_DIRECTORY:
+ self.pathname = os.path.join(self.found[1], '__init__.py')
+ elif self.kind == imp.PY_SOURCE:
+ self.pathname = self.found[1]
+ return self
+
+ def transform(self, source):
+ # This implementation uses lib2to3,
+ # you can override and use something else
+ # if that's better for you
+
+ # lib2to3 likes a newline at the end
+ RTs.setup()
+ source += '\n'
+ try:
+ tree = RTs._rt.refactor_string(source, self.pathname)
+ except ParseError as e:
+ if e.msg != 'bad input' or e.value != '=':
+ raise
+ tree = RTs._rtp.refactor_string(source, self.pathname)
+ # could optimise a bit for only doing str(tree) if
+ # getattr(tree, 'was_changed', False) returns True
+ return str(tree)[:-1] # remove added newline
+
+ def load_module(self, fullname):
+ logger.debug('Running load_module for {0}...'.format(fullname))
+ if fullname in sys.modules:
+ mod = sys.modules[fullname]
+ else:
+ if self.kind in (imp.PY_COMPILED, imp.C_EXTENSION, imp.C_BUILTIN,
+ imp.PY_FROZEN):
+ convert = False
+ # elif (self.pathname.startswith(_stdlibprefix)
+ # and 'site-packages' not in self.pathname):
+ # # We assume it's a stdlib package in this case. Is this too brittle?
+ # # Please file a bug report at https://github.com/PythonCharmers/python-future
+ # # if so.
+ # convert = False
+ # in theory, other paths could be configured to be excluded here too
+ elif any([fullname.startswith(path) for path in self.exclude_paths]):
+ convert = False
+ elif any([fullname.startswith(path) for path in self.include_paths]):
+ convert = True
+ else:
+ convert = False
+ if not convert:
+ logger.debug('Excluded {0} from translation'.format(fullname))
+ mod = imp.load_module(fullname, *self.found)
+ else:
+ logger.debug('Autoconverting {0} ...'.format(fullname))
+ mod = imp.new_module(fullname)
+ sys.modules[fullname] = mod
+
+ # required by PEP 302
+ mod.__file__ = self.pathname
+ mod.__name__ = fullname
+ mod.__loader__ = self
+
+ # This:
+ # mod.__package__ = '.'.join(fullname.split('.')[:-1])
+ # seems to result in "SystemError: Parent module '' not loaded,
+ # cannot perform relative import" for a package's __init__.py
+ # file. We use the approach below. Another option to try is the
+ # minimal load_module pattern from the PEP 302 text instead.
+
+ # Is the test in the next line more or less robust than the
+ # following one? Presumably less ...
+ # ispkg = self.pathname.endswith('__init__.py')
+
+ if self.kind == imp.PKG_DIRECTORY:
+ mod.__path__ = [ os.path.dirname(self.pathname) ]
+ mod.__package__ = fullname
+ else:
+ #else, regular module
+ mod.__path__ = []
+ mod.__package__ = fullname.rpartition('.')[0]
+
+ try:
+ cachename = imp.cache_from_source(self.pathname)
+ if not os.path.exists(cachename):
+ update_cache = True
+ else:
+ sourcetime = os.stat(self.pathname).st_mtime
+ cachetime = os.stat(cachename).st_mtime
+ update_cache = cachetime < sourcetime
+ # # Force update_cache to work around a problem with it being treated as Py3 code???
+ # update_cache = True
+ if not update_cache:
+ with open(cachename, 'rb') as f:
+ data = f.read()
+ try:
+ code = marshal.loads(data)
+ except Exception:
+ # pyc could be corrupt. Regenerate it
+ update_cache = True
+ if update_cache:
+ if self.found[0]:
+ source = self.found[0].read()
+ elif self.kind == imp.PKG_DIRECTORY:
+ with open(self.pathname) as f:
+ source = f.read()
+
+ if detect_python2(source, self.pathname):
+ source = self.transform(source)
+ with open('/tmp/futurized_code.py', 'w') as f:
+ f.write('### Futurized code (from %s)\n%s' %
+ (self.pathname, source))
+
+ code = compile(source, self.pathname, 'exec')
+
+ dirname = os.path.dirname(cachename)
+ try:
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+ with open(cachename, 'wb') as f:
+ data = marshal.dumps(code)
+ f.write(data)
+ except Exception: # could be write-protected
+ pass
+ exec(code, mod.__dict__)
+ except Exception as e:
+ # must remove module from sys.modules
+ del sys.modules[fullname]
+ raise # keep it simple
+
+ if self.found[0]:
+ self.found[0].close()
+ return mod
+
+_hook = Py2Fixer()
+
+
+def install_hooks(include_paths=(), exclude_paths=()):
+ if isinstance(include_paths, str):
+ include_paths = (include_paths,)
+ if isinstance(exclude_paths, str):
+ exclude_paths = (exclude_paths,)
+ assert len(include_paths) + len(exclude_paths) > 0, 'Pass at least one argument'
+ _hook.include(include_paths)
+ _hook.exclude(exclude_paths)
+ # _hook.debug = debug
+ enable = sys.version_info[0] >= 3 # enabled for all 3.x
+ if enable and _hook not in sys.meta_path:
+ sys.meta_path.insert(0, _hook) # insert at beginning. This could be made a parameter
+
+ # We could return the hook when there are ways of configuring it
+ #return _hook
+
+
+def remove_hooks():
+ if _hook in sys.meta_path:
+ sys.meta_path.remove(_hook)
+
+
+def detect_hooks():
+ """
+ Returns True if the import hooks are installed, False if not.
+ """
+ return _hook in sys.meta_path
+ # present = any([hasattr(hook, 'PY2FIXER') for hook in sys.meta_path])
+ # return present
+
+
+class hooks(object):
+ """
+ Acts as a context manager. Use like this:
+
+ >>> from past import translation
+ >>> with translation.hooks():
+ ... import mypy2module
+ >>> import requests # py2/3 compatible anyway
+ >>> # etc.
+ """
+ def __enter__(self):
+ self.hooks_were_installed = detect_hooks()
+ install_hooks()
+ return self
+
+ def __exit__(self, *args):
+ if not self.hooks_were_installed:
+ remove_hooks()
+
+
+class suspend_hooks(object):
+ """
+ Acts as a context manager. Use like this:
+
+ >>> from past import translation
+ >>> translation.install_hooks()
+ >>> import http.client
+ >>> # ...
+ >>> with translation.suspend_hooks():
+ >>> import requests # or others that support Py2/3
+
+ If the hooks were disabled before the context, they are not installed when
+ the context is left.
+ """
+ def __enter__(self):
+ self.hooks_were_installed = detect_hooks()
+ remove_hooks()
+ return self
+ def __exit__(self, *args):
+ if self.hooks_were_installed:
+ install_hooks()
diff --git a/.install/.kodi/addons/script.module.future/libs/past/types/__init__.py b/.install/.kodi/addons/script.module.future/libs/past/types/__init__.py
new file mode 100644
index 000000000..91dd270f2
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/past/types/__init__.py
@@ -0,0 +1,29 @@
+"""
+Forward-ports of types from Python 2 for use with Python 3:
+
+- ``basestring``: equivalent to ``(str, bytes)`` in ``isinstance`` checks
+- ``dict``: with list-producing .keys() etc. methods
+- ``str``: bytes-like, but iterating over them doesn't produce integers
+- ``long``: alias of Py3 int with ``L`` suffix in the ``repr``
+- ``unicode``: alias of Py3 str with ``u`` prefix in the ``repr``
+
+"""
+
+from past import utils
+
+if utils.PY2:
+ import __builtin__
+ basestring = __builtin__.basestring
+ dict = __builtin__.dict
+ str = __builtin__.str
+ long = __builtin__.long
+ unicode = __builtin__.unicode
+ __all__ = []
+else:
+ from .basestring import basestring
+ from .olddict import olddict
+ from .oldstr import oldstr
+ long = int
+ unicode = str
+ # from .unicode import unicode
+ __all__ = ['basestring', 'olddict', 'oldstr', 'long', 'unicode']
diff --git a/.install/.kodi/addons/script.module.future/libs/past/types/basestring.py b/.install/.kodi/addons/script.module.future/libs/past/types/basestring.py
new file mode 100644
index 000000000..1cab22f6c
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/past/types/basestring.py
@@ -0,0 +1,39 @@
+"""
+An implementation of the basestring type for Python 3
+
+Example use:
+
+>>> s = b'abc'
+>>> assert isinstance(s, basestring)
+>>> from past.types import str as oldstr
+>>> s2 = oldstr(b'abc')
+>>> assert isinstance(s2, basestring)
+
+"""
+
+import sys
+
+from past.utils import with_metaclass, PY2
+
+if PY2:
+ str = unicode
+
+ver = sys.version_info[:2]
+
+
+class BaseBaseString(type):
+ def __instancecheck__(cls, instance):
+ return isinstance(instance, (bytes, str))
+
+ def __subclasshook__(cls, thing):
+ # TODO: What should go here?
+ raise NotImplemented
+
+
+class basestring(with_metaclass(BaseBaseString)):
+ """
+ A minimal backport of the Python 2 basestring type to Py3
+ """
+
+
+__all__ = ['basestring']
diff --git a/.install/.kodi/addons/script.module.future/libs/past/types/olddict.py b/.install/.kodi/addons/script.module.future/libs/past/types/olddict.py
new file mode 100644
index 000000000..f4f92a26a
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/past/types/olddict.py
@@ -0,0 +1,96 @@
+"""
+A dict subclass for Python 3 that behaves like Python 2's dict
+
+Example use:
+
+>>> from past.builtins import dict
+>>> d1 = dict() # instead of {} for an empty dict
+>>> d2 = dict(key1='value1', key2='value2')
+
+The keys, values and items methods now return lists on Python 3.x and there are
+methods for iterkeys, itervalues, iteritems, and viewkeys etc.
+
+>>> for d in (d1, d2):
+... assert isinstance(d.keys(), list)
+... assert isinstance(d.values(), list)
+... assert isinstance(d.items(), list)
+"""
+
+import sys
+
+from past.utils import with_metaclass
+
+
+_builtin_dict = dict
+ver = sys.version_info[:2]
+
+
+class BaseOldDict(type):
+ def __instancecheck__(cls, instance):
+ return isinstance(instance, _builtin_dict)
+
+
+class olddict(with_metaclass(BaseOldDict, _builtin_dict)):
+ """
+ A forward port of the Python 2 dict object to Py3
+ """
+ iterkeys = _builtin_dict.keys
+ viewkeys = _builtin_dict.keys
+
+ def keys(self):
+ return list(super(olddict, self).keys())
+
+ itervalues = _builtin_dict.values
+ viewvalues = _builtin_dict.values
+
+ def values(self):
+ return list(super(olddict, self).values())
+
+ iteritems = _builtin_dict.items
+ viewitems = _builtin_dict.items
+
+ def items(self):
+ return list(super(olddict, self).items())
+
+ def has_key(self, k):
+ """
+ D.has_key(k) -> True if D has a key k, else False
+ """
+ return k in self
+
+ # def __new__(cls, *args, **kwargs):
+ # """
+ # dict() -> new empty dictionary
+ # dict(mapping) -> new dictionary initialized from a mapping object's
+ # (key, value) pairs
+ # dict(iterable) -> new dictionary initialized as if via:
+ # d = {}
+ # for k, v in iterable:
+ # d[k] = v
+ # dict(**kwargs) -> new dictionary initialized with the name=value pairs
+ # in the keyword argument list. For example: dict(one=1, two=2)
+
+ # """
+ #
+ # if len(args) == 0:
+ # return super(olddict, cls).__new__(cls)
+ # # Was: elif isinstance(args[0], newbytes):
+ # # We use type() instead of the above because we're redefining
+ # # this to be True for all unicode string subclasses. Warning:
+ # # This may render newstr un-subclassable.
+ # elif type(args[0]) == olddict:
+ # return args[0]
+ # # elif isinstance(args[0], _builtin_dict):
+ # # value = args[0]
+ # else:
+ # value = args[0]
+ # return super(olddict, cls).__new__(cls, value)
+
+ def __native__(self):
+ """
+ Hook for the past.utils.native() function
+ """
+ return super(oldbytes, self)
+
+
+__all__ = ['olddict']
diff --git a/.install/.kodi/addons/script.module.future/libs/past/types/oldstr.py b/.install/.kodi/addons/script.module.future/libs/past/types/oldstr.py
new file mode 100644
index 000000000..7768d3284
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/past/types/oldstr.py
@@ -0,0 +1,132 @@
+"""
+Pure-Python implementation of a Python 2-like str object for Python 3.
+"""
+
+from collections import Iterable
+from numbers import Integral
+
+from past.utils import PY2, with_metaclass
+
+
+_builtin_bytes = bytes
+
+
+class BaseOldStr(type):
+ def __instancecheck__(cls, instance):
+ return isinstance(instance, _builtin_bytes)
+
+
+def unescape(s):
+ """
+ Interprets strings with escape sequences
+
+ Example:
+ >>> s = unescape(r'abc\\def') # i.e. 'abc\\\\def'
+ >>> print(s)
+ 'abc\def'
+ >>> s2 = unescape('abc\\ndef')
+ >>> len(s2)
+ 8
+ >>> print(s2)
+ abc
+ def
+ """
+ return s.encode().decode('unicode_escape')
+
+
+class oldstr(with_metaclass(BaseOldStr, _builtin_bytes)):
+ """
+ A forward port of the Python 2 8-bit string object to Py3
+ """
+ # Python 2 strings have no __iter__ method:
+ @property
+ def __iter__(self):
+ raise AttributeError
+
+ def __dir__(self):
+ return [thing for thing in dir(_builtin_bytes) if thing != '__iter__']
+
+ # def __new__(cls, *args, **kwargs):
+ # """
+ # From the Py3 bytes docstring:
+
+ # bytes(iterable_of_ints) -> bytes
+ # bytes(string, encoding[, errors]) -> bytes
+ # bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer
+ # bytes(int) -> bytes object of size given by the parameter initialized with null bytes
+ # bytes() -> empty bytes object
+ #
+ # Construct an immutable array of bytes from:
+ # - an iterable yielding integers in range(256)
+ # - a text string encoded using the specified encoding
+ # - any object implementing the buffer API.
+ # - an integer
+ # """
+ #
+ # if len(args) == 0:
+ # return super(newbytes, cls).__new__(cls)
+ # # Was: elif isinstance(args[0], newbytes):
+ # # We use type() instead of the above because we're redefining
+ # # this to be True for all unicode string subclasses. Warning:
+ # # This may render newstr un-subclassable.
+ # elif type(args[0]) == newbytes:
+ # return args[0]
+ # elif isinstance(args[0], _builtin_bytes):
+ # value = args[0]
+ # elif isinstance(args[0], unicode):
+ # if 'encoding' not in kwargs:
+ # raise TypeError('unicode string argument without an encoding')
+ # ###
+ # # Was: value = args[0].encode(**kwargs)
+ # # Python 2.6 string encode() method doesn't take kwargs:
+ # # Use this instead:
+ # newargs = [kwargs['encoding']]
+ # if 'errors' in kwargs:
+ # newargs.append(kwargs['errors'])
+ # value = args[0].encode(*newargs)
+ # ###
+ # elif isinstance(args[0], Iterable):
+ # if len(args[0]) == 0:
+ # # What is this?
+ # raise ValueError('unknown argument type')
+ # elif len(args[0]) > 0 and isinstance(args[0][0], Integral):
+ # # It's a list of integers
+ # value = b''.join([chr(x) for x in args[0]])
+ # else:
+ # raise ValueError('item cannot be interpreted as an integer')
+ # elif isinstance(args[0], Integral):
+ # if args[0] < 0:
+ # raise ValueError('negative count')
+ # value = b'\x00' * args[0]
+ # else:
+ # value = args[0]
+ # return super(newbytes, cls).__new__(cls, value)
+
+ def __repr__(self):
+ s = super(oldstr, self).__repr__() # e.g. "b'abc'" on Py3; strip the 'b' prefix for a Py2-style repr
+ return s[1:]
+
+ def __str__(self):
+ s = super(oldstr, self).__str__() # e.g. "b'abc'" or "b'abc\\ndef'"
+ # TODO: fix this:
+ assert s[:2] == "b'" and s[-1] == "'"
+ return unescape(s[2:-1]) # e.g. 'abc' or 'abc\ndef'
+
+ def __getitem__(self, y):
+ if isinstance(y, Integral):
+ return super(oldstr, self).__getitem__(slice(y, y+1))
+ else:
+ return super(oldstr, self).__getitem__(y)
+
+ def __getslice__(self, *args):
+ return self.__getitem__(slice(*args))
+
+ def __contains__(self, key):
+ if isinstance(key, int):
+ return False
+
+ def __native__(self):
+ return bytes(self)
+
+
+__all__ = ['oldstr']
diff --git a/.install/.kodi/addons/script.module.future/libs/past/utils/__init__.py b/.install/.kodi/addons/script.module.future/libs/past/utils/__init__.py
new file mode 100644
index 000000000..c6606d0b9
--- /dev/null
+++ b/.install/.kodi/addons/script.module.future/libs/past/utils/__init__.py
@@ -0,0 +1,97 @@
+"""
+Various non-built-in utility functions and definitions for Py2
+compatibility in Py3.
+
+For example:
+
+ >>> # The old_div() function behaves like Python 2's / operator
+ >>> # without "from __future__ import division"
+ >>> from past.utils import old_div
+ >>> old_div(3, 2) # like 3/2 in Py2
+ 0
+ >>> old_div(3, 2.0) # like 3/2.0 in Py2
+ 1.5
+"""
+
+import sys
+import numbers
+
+PY3 = sys.version_info[0] == 3
+PY2 = sys.version_info[0] == 2
+PYPY = hasattr(sys, 'pypy_translation_info')
+
+
+def with_metaclass(meta, *bases):
+ """
+ Function from jinja2/_compat.py. License: BSD.
+
+ Use it like this::
+
+ class BaseForm(object):
+ pass
+
+ class FormType(type):
+ pass
+
+ class Form(with_metaclass(FormType, BaseForm)):
+ pass
+
+ This requires a bit of explanation: the basic idea is to make a
+ dummy metaclass for one level of class instantiation that replaces
+ itself with the actual metaclass. Because of internal type checks
+ we also need to make sure that we downgrade the custom metaclass
+ for one level to something closer to type (that's why __call__ and
+ __init__ comes back from type etc.).
+
+ This has the advantage over six.with_metaclass of not introducing
+ dummy classes into the final MRO.
+ """
+ class metaclass(meta):
+ __call__ = type.__call__
+ __init__ = type.__init__
+ def __new__(cls, name, this_bases, d):
+ if this_bases is None:
+ return type.__new__(cls, name, (), d)
+ return meta(name, bases, d)
+ return metaclass('temporary_class', None, {})
+
+
+def native(obj):
+ """
+ On Py2, this is a no-op: native(obj) -> obj
+
+ On Py3, returns the corresponding native Py3 types that are
+ superclasses for forward-ported objects from Py2:
+
+ >>> from past.builtins import str, dict
+
+ >>> native(str(b'ABC')) # Output on Py3 follows. On Py2, output is 'ABC'
+ b'ABC'
+ >>> type(native(str(b'ABC')))
+ bytes
+
+ Existing native types on Py3 will be returned unchanged:
+
+ >>> type(native(b'ABC'))
+ bytes
+ """
+ if hasattr(obj, '__native__'):
+ return obj.__native__()
+ else:
+ return obj
+
+
+# An alias for future.utils.old_div():
+def old_div(a, b):
+ """
+ Equivalent to ``a / b`` on Python 2 without ``from __future__ import
+ division``.
+
+ TODO: generalize this to other objects (like arrays etc.)
+ """
+ if isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral):
+ return a // b
+ else:
+ return a / b
+
+__all__ = ['PY3', 'PY2', 'PYPY', 'with_metaclass', 'native', 'old_div']
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/README.md b/.install/.kodi/addons/script.module.inputstreamhelper/README.md
index 45be83155..bbdabb9ca 100644
--- a/.install/.kodi/addons/script.module.inputstreamhelper/README.md
+++ b/.install/.kodi/addons/script.module.inputstreamhelper/README.md
@@ -90,6 +90,9 @@ Please report any issues or bug reports on the [GitHub Issues](https://github.co
This module is licensed under the **The MIT License**. Please see the [LICENSE.txt](LICENSE.txt) file for details.
## Releases
+### v0.5.2 (2020-12-13)
+- Update Chrome OS ARM hardware id's (@mediaminister)
+
### v0.5.1 (2020-10-02)
- Fix incorrect ARM HWIDs: PHASER and PHASER360 (@dagwieers)
- Added Hebrew translations (@haggaie)
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/addon.xml b/.install/.kodi/addons/script.module.inputstreamhelper/addon.xml
index 67de6bb47..92a78707b 100644
--- a/.install/.kodi/addons/script.module.inputstreamhelper/addon.xml
+++ b/.install/.kodi/addons/script.module.inputstreamhelper/addon.xml
@@ -1,5 +1,5 @@
-
+
@@ -23,6 +23,9 @@
Un simple module Kodi qui simplifie la vie des développeurs de modules complémentaires en s’appuyant sur des modules complémentaires basés sur InputStream et sur la lecture de DRM.
Un módulo Kodi simple que hace la vida más fácil para los desarrolladores de complementos que dependen de complementos basados en InputStream y reproducción de DRM.
+v0.5.2 (2020-12-13)
+- Update Chrome OS ARM hardware id's
+
v0.5.1 (2020-10-02)
- Fix incorrect ARM HWIDs: PHASER and PHASER360
- Added Hebrew translations
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/__init__.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/__init__.pyo
new file mode 100644
index 000000000..e739c5e20
Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/__init__.pyo differ
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.py b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.py
index 67fdec006..cbbf45ada 100644
--- a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.py
+++ b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.py
@@ -79,36 +79,24 @@ WIDEVINE_CONFIG_NAME = 'manifest.json'
CHROMEOS_RECOVERY_URL = 'https://dl.google.com/dl/edgedl/chromeos/recovery/recovery.json'
-# Last updated: 2019-08-20 (version 12239.67.0)
+# To keep the Chrome OS ARM hardware ID list up to date, the following resources can be used:
+# https://www.chromium.org/chromium-os/developer-information-for-chrome-os-devices
+# https://cros-updates-serving.appspot.com/
+# Last updated: 2020-10-05
CHROMEOS_RECOVERY_ARM_HWIDS = [
- # 'ARKHAM',
- 'BIG',
- 'BLAZE',
'BOB',
- # 'DAISY',
'DRUWL',
'DUMO',
'ELM',
- 'EXPRESSO',
'FIEVEL',
'HANA',
- 'JAQ',
- 'JERRY',
+ 'JUNIPER-HVPU',
'KEVIN',
- 'KITTY',
+ 'KODAMA',
+ 'KRANE-ZDKS',
'MICKEY',
- 'MIGHTY',
- 'MINNIE',
- 'PI',
- 'PIT',
- 'RELM',
'SCARLET',
- 'SKATE',
- 'SNOW',
- 'SPEEDY',
- 'SPRING',
'TIGER',
- # 'WHIRLWIND',
]
CHROMEOS_BLOCK_SIZE = 512
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.pyo
new file mode 100644
index 000000000..944d0702c
Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.pyo differ
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.py b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.py
index aa107c473..0cc292578 100644
--- a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.py
+++ b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.py
@@ -7,6 +7,13 @@ from contextlib import contextmanager
import xbmc
import xbmcaddon
from xbmcgui import DialogProgress, DialogProgressBG
+
+try: # Kodi v19 or newer
+ from xbmcvfs import translatePath
+except ImportError: # Kodi v18 and older
+ # pylint: disable=ungrouped-imports
+ from xbmc import translatePath
+
from .unicodes import from_unicode, to_unicode
# NOTE: We need to explicitly add the add-on id here!
@@ -60,7 +67,7 @@ def kodi_version_major():
def translate_path(path):
"""Translate special xbmc paths"""
- return to_unicode(xbmc.translatePath(from_unicode(path)))
+ return to_unicode(translatePath(from_unicode(path)))
def get_addon_info(key):
@@ -186,14 +193,11 @@ def get_setting_int(key, default=None):
def get_setting_float(key, default=None):
"""Get an add-on setting as float"""
+ value = get_setting(key, default)
try:
- return ADDON.getSettingNumber(key)
- except (AttributeError, TypeError): # On Krypton or older, or when not a float
- value = get_setting(key, default)
- try:
- return float(value)
- except ValueError:
- return default
+ return float(value)
+ except ValueError:
+ return default
except RuntimeError: # Occurs when the add-on is disabled
return default
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.pyo
new file mode 100644
index 000000000..b7669f765
Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.pyo differ
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/unicodes.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/unicodes.pyo
new file mode 100644
index 000000000..06bd760a0
Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/unicodes.pyo differ
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/utils.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/utils.pyo
new file mode 100644
index 000000000..5a7114acc
Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/utils.pyo differ
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/__init__.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/__init__.pyo
new file mode 100644
index 000000000..e225c7f33
Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/__init__.pyo differ
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.py b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.py
index 09b13fac7..fd6d3977e 100644
--- a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.py
+++ b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.py
@@ -147,6 +147,7 @@ def install_widevine_arm(backup_path):
localize(30018, diskspace=sizeof_fmt(required_diskspace)))
return False
+ log(2, 'Downloading best ChromeOS image for Widevine: {hwid} ({version})'.format(**arm_device))
url = arm_device['url']
downloaded = http_download(url, message=localize(30022), checksum=arm_device['sha1'], hash_alg='sha1',
dl_size=int(arm_device['zipfilesize'])) # Downloading the recovery image
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.pyo
new file mode 100644
index 000000000..435a439f5
Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.pyo differ
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm_chromeos.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm_chromeos.pyo
new file mode 100644
index 000000000..478fcb386
Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm_chromeos.pyo differ
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/widevine.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/widevine.pyo
new file mode 100644
index 000000000..5c3a4c009
Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/widevine.pyo differ
diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/resources/settings.xml b/.install/.kodi/addons/script.module.inputstreamhelper/resources/settings.xml
index 53b42eb1a..4a1dfe3a3 100644
--- a/.install/.kodi/addons/script.module.inputstreamhelper/resources/settings.xml
+++ b/.install/.kodi/addons/script.module.inputstreamhelper/resources/settings.xml
@@ -1,9 +1,9 @@
-
-
-
+
+
+
diff --git a/.install/.kodi/addons/script.module.parsedom/addon.xml b/.install/.kodi/addons/script.module.parsedom/addon.xml
new file mode 100644
index 000000000..e0ee2f436
--- /dev/null
+++ b/.install/.kodi/addons/script.module.parsedom/addon.xml
@@ -0,0 +1,20 @@
+
+
+
+
+
+
+ all
+ Parsedom for xbmc plugins.
+
+
+
+ GPLv3
+ all
+
+
+ http://forum.xbmc.org/showthread.php?tid=116498
+ https://github.com/HenrikDK/xbmc-common-plugin-functions
+
+
+
\ No newline at end of file
diff --git a/.install/.kodi/addons/script.module.parsedom/changelog.txt b/.install/.kodi/addons/script.module.parsedom/changelog.txt
new file mode 100644
index 000000000..1d0d94bc8
--- /dev/null
+++ b/.install/.kodi/addons/script.module.parsedom/changelog.txt
@@ -0,0 +1,30 @@
+[B]Version 1.5.0[/B]
+- Fixed: proper fix for getParameters that only affects Frodo branch
+- Added: new function to get the version of xbmc as a float
+
+
+[B]Version 1.4.0[/B]
+- Special fix for eden branch to unbreak changes for Frodo
+
+[B]Version 1.3.0[/B]
+- Team xbmc decided to stop unquote-ing their path strings, so getParams now does it for them
+
+[B]Version 1.2.0[/B]
+- fetchPage should default to utf-8 encoding
+- parseDOM should handle utf-8 encoding
+
+[B]Version 1.1.0[/B]
+- Handle \t that breaks DOM variable extraction
+- Added extractJS function
+
+[B]Version 1.0.0[/B]
+- Minor fixes
+
+[B]Version 0.9.1[/B]
+- Stability and more functions
+- Add cookie support to fetchPage.
+- Add getCookieInfoAsHTML.
+- Add POST and Refering capabilities to fetchPage
+
+[B]Version 0.9.0[/B]
+- Initial public test run.
diff --git a/.install/.kodi/addons/script.module.parsedom/icon.png b/.install/.kodi/addons/script.module.parsedom/icon.png
new file mode 100644
index 000000000..ca4c53adb
Binary files /dev/null and b/.install/.kodi/addons/script.module.parsedom/icon.png differ
diff --git a/.install/.kodi/addons/script.module.parsedom/lib/CommonFunctions.py b/.install/.kodi/addons/script.module.parsedom/lib/CommonFunctions.py
new file mode 100644
index 000000000..a6e5253eb
--- /dev/null
+++ b/.install/.kodi/addons/script.module.parsedom/lib/CommonFunctions.py
@@ -0,0 +1,558 @@
+'''
+ Parsedom for XBMC plugins
+ Copyright (C) 2010-2011 Tobias Ussing And Henrik Mosgaard Jensen
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see .
+'''
+
+import sys
+import urllib
+import urllib2
+import re
+import io
+import inspect
+import time
+import HTMLParser
+#import chardet
+import json
+
+version = u"2.5.1"
+plugin = u"CommonFunctions-" + version
+print plugin
+
+USERAGENT = u"Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1"
+
+if hasattr(sys.modules["__main__"], "xbmc"):
+ xbmc = sys.modules["__main__"].xbmc
+else:
+ import xbmc
+
+if hasattr(sys.modules["__main__"], "xbmcgui"):
+ xbmcgui = sys.modules["__main__"].xbmcgui
+else:
+ import xbmcgui
+
+if hasattr(sys.modules["__main__"], "dbg"):
+ dbg = sys.modules["__main__"].dbg
+else:
+ dbg = False
+
+if hasattr(sys.modules["__main__"], "dbglevel"):
+ dbglevel = sys.modules["__main__"].dbglevel
+else:
+ dbglevel = 3
+
+if hasattr(sys.modules["__main__"], "opener"):
+ urllib2.install_opener(sys.modules["__main__"].opener)
+
+
+# This function raises a keyboard for user input
+def getUserInput(title=u"Input", default=u"", hidden=False):
+ log("", 5)
+ result = None
+
+ # Fix for when this functions is called with default=None
+ if not default:
+ default = u""
+
+ keyboard = xbmc.Keyboard(default, title)
+ keyboard.setHiddenInput(hidden)
+ keyboard.doModal()
+
+ if keyboard.isConfirmed():
+ result = keyboard.getText()
+
+ log(repr(result), 5)
+ return result
+
+
+# This function raises a keyboard numpad for user input
+def getUserInputNumbers(title=u"Input", default=u""):
+ log("", 5)
+ result = None
+
+ # Fix for when this functions is called with default=None
+ if not default:
+ default = u""
+
+ keyboard = xbmcgui.Dialog()
+ result = keyboard.numeric(0, title, default)
+
+ log(repr(result), 5)
+ return str(result)
+
+
+def getXBMCVersion():
+ log("", 3)
+ version = xbmc.getInfoLabel( "System.BuildVersion" )
+ log(version, 3)
+ for key in ["-", " "]:
+ if version.find(key) -1:
+ version = version[:version.find(key)]
+ version = float(version)
+ log(repr(version))
+ return version
+
+# Converts the request url passed on by xbmc to the plugin into a dict of key-value pairs
+def getParameters(parameterString):
+ log("", 5)
+ commands = {}
+ if getXBMCVersion() >= 12.0:
+ parameterString = urllib.unquote_plus(parameterString)
+ splitCommands = parameterString[parameterString.find('?') + 1:].split('&')
+
+ for command in splitCommands:
+ if (len(command) > 0):
+ splitCommand = command.split('=')
+ key = splitCommand[0]
+ try:
+ value = splitCommand[1].encode("utf-8")
+ except:
+ log("Error utf-8 encoding argument value: " + repr(splitCommand[1]))
+ value = splitCommand[1]
+
+ commands[key] = value
+
+ log(repr(commands), 5)
+ return commands
+
+
+def replaceHTMLCodes(txt):
+ log(repr(txt), 5)
+
+ # Fix missing ; in ;
+ txt = re.sub("([0-9]+)([^;^0-9]+)", "\\1;\\2", makeUTF8(txt))
+
+ txt = HTMLParser.HTMLParser().unescape(txt)
+ txt = txt.replace("&", "&")
+ log(repr(txt), 5)
+ return txt
+
+
+def stripTags(html):
+ log(repr(html), 5)
+ sub_start = html.find("<")
+ sub_end = html.find(">")
+ while sub_start < sub_end and sub_start > -1:
+ html = html.replace(html[sub_start:sub_end + 1], "").strip()
+ sub_start = html.find("<")
+ sub_end = html.find(">")
+
+ log(repr(html), 5)
+ return html
+
+
+def _getDOMContent(html, name, match, ret): # Cleanup
+ log("match: " + match, 3)
+
+ endstr = u"" + name # + ">"
+
+ start = html.find(match)
+ end = html.find(endstr, start)
+ pos = html.find("<" + name, start + 1 )
+
+ log(str(start) + " < " + str(end) + ", pos = " + str(pos) + ", endpos: " + str(end), 8)
+
+ while pos < end and pos != -1: # Ignore too early return
+ tend = html.find(endstr, end + len(endstr))
+ if tend != -1:
+ end = tend
+ pos = html.find("<" + name, pos + 1)
+ log("loop: " + str(start) + " < " + str(end) + " pos = " + str(pos), 8)
+
+ log("start: %s, len: %s, end: %s" % (start, len(match), end), 3)
+ if start == -1 and end == -1:
+ result = u""
+ elif start > -1 and end > -1:
+ result = html[start + len(match):end]
+ elif end > -1:
+ result = html[:end]
+ elif start > -1:
+ result = html[start + len(match):]
+
+ if ret:
+ endstr = html[end:html.find(">", html.find(endstr)) + 1]
+ result = match + result + endstr
+
+ log("done result length: " + str(len(result)), 3)
+ return result
+
+def _getDOMAttributes(match, name, ret):
+ log("", 3)
+
+ lst = re.compile('<' + name + '.*?' + ret + '=([\'"].[^>]*?[\'"])>', re.M | re.S).findall(match)
+ if len(lst) == 0:
+ lst = re.compile('<' + name + '.*?' + ret + '=(.[^>]*?)>', re.M | re.S).findall(match)
+ ret = []
+ for tmp in lst:
+ cont_char = tmp[0]
+ if cont_char in "'\"":
+ log("Using %s as quotation mark" % cont_char, 3)
+
+ # Limit down to next variable.
+ if tmp.find('=' + cont_char, tmp.find(cont_char, 1)) > -1:
+ tmp = tmp[:tmp.find('=' + cont_char, tmp.find(cont_char, 1))]
+
+ # Limit to the last quotation mark
+ if tmp.rfind(cont_char, 1) > -1:
+ tmp = tmp[1:tmp.rfind(cont_char)]
+ else:
+ log("No quotation mark found", 3)
+ if tmp.find(" ") > 0:
+ tmp = tmp[:tmp.find(" ")]
+ elif tmp.find("/") > 0:
+ tmp = tmp[:tmp.find("/")]
+ elif tmp.find(">") > 0:
+ tmp = tmp[:tmp.find(">")]
+
+ ret.append(tmp.strip())
+
+ log("Done: " + repr(ret), 3)
+ return ret
+
+def _getDOMElements(item, name, attrs):
+ log("", 3)
+
+ lst = []
+ for key in attrs:
+ lst2 = re.compile('(<' + name + '[^>]*?(?:' + key + '=[\'"]' + attrs[key] + '[\'"].*?>))', re.M | re.S).findall(item)
+ if len(lst2) == 0 and attrs[key].find(" ") == -1: # Try matching without quotation marks
+ lst2 = re.compile('(<' + name + '[^>]*?(?:' + key + '=' + attrs[key] + '.*?>))', re.M | re.S).findall(item)
+
+ if len(lst) == 0:
+ log("Setting main list " + repr(lst2), 5)
+ lst = lst2
+ lst2 = []
+ else:
+ log("Setting new list " + repr(lst2), 5)
+ test = range(len(lst))
+ test.reverse()
+ for i in test: # Delete anything missing from the next list.
+ if not lst[i] in lst2:
+ log("Purging mismatch " + str(len(lst)) + " - " + repr(lst[i]), 3)
+ del(lst[i])
+
+ if len(lst) == 0 and attrs == {}:
+ log("No list found, trying to match on name only", 3)
+ lst = re.compile('(<' + name + '>)', re.M | re.S).findall(item)
+ if len(lst) == 0:
+ lst = re.compile('(<' + name + ' .*?>)', re.M | re.S).findall(item)
+
+ log("Done: " + str(type(lst)), 3)
+ return lst
+
+def parseDOM(html, name=u"", attrs={}, ret=False):
+ log("Name: " + repr(name) + " - Attrs:" + repr(attrs) + " - Ret: " + repr(ret) + " - HTML: " + str(type(html)), 3)
+
+ if isinstance(name, str): # Should be handled
+ try:
+ name = name #.decode("utf-8")
+ except:
+ log("Couldn't decode name binary string: " + repr(name))
+
+ if isinstance(html, str):
+ try:
+ html = [html.decode("utf-8")] # Replace with chardet thingy
+ except:
+ log("Couldn't decode html binary string. Data length: " + repr(len(html)))
+ html = [html]
+ elif isinstance(html, unicode):
+ html = [html]
+ elif not isinstance(html, list):
+ log("Input isn't list or string/unicode.")
+ return u""
+
+ if not name.strip():
+ log("Missing tag name")
+ return u""
+
+ ret_lst = []
+ for item in html:
+ temp_item = re.compile('(<[^>]*?\n[^>]*?>)').findall(item)
+ for match in temp_item:
+ item = item.replace(match, match.replace("\n", " "))
+
+ lst = _getDOMElements(item, name, attrs)
+
+ if isinstance(ret, str):
+ log("Getting attribute %s content for %s matches " % (ret, len(lst) ), 3)
+ lst2 = []
+ for match in lst:
+ lst2 += _getDOMAttributes(match, name, ret)
+ lst = lst2
+ else:
+ log("Getting element content for %s matches " % len(lst), 3)
+ lst2 = []
+ for match in lst:
+ log("Getting element content for %s" % match, 4)
+ temp = _getDOMContent(item, name, match, ret).strip()
+ item = item[item.find(temp, item.find(match)) + len(temp):]
+ lst2.append(temp)
+ lst = lst2
+ ret_lst += lst
+
+ log("Done: " + repr(ret_lst), 3)
+ return ret_lst
+
+
+def extractJS(data, function=False, variable=False, match=False, evaluate=False, values=False):
+ log("")
+ scripts = parseDOM(data, "script")
+ if len(scripts) == 0:
+ log("Couldn't find any script tags. Assuming javascript file was given.")
+ scripts = [data]
+
+ lst = []
+ log("Extracting", 4)
+ for script in scripts:
+ tmp_lst = []
+ if function:
+ tmp_lst = re.compile(function + '\(.*?\).*?;', re.M | re.S).findall(script)
+ elif variable:
+ tmp_lst = re.compile(variable + '[ ]+=.*?;', re.M | re.S).findall(script)
+ else:
+ tmp_lst = [script]
+ if len(tmp_lst) > 0:
+ log("Found: " + repr(tmp_lst), 4)
+ lst += tmp_lst
+ else:
+ log("Found nothing on: " + script, 4)
+
+ test = range(0, len(lst))
+ test.reverse()
+ for i in test:
+ if match and lst[i].find(match) == -1:
+ log("Removing item: " + repr(lst[i]), 10)
+ del lst[i]
+ else:
+ log("Cleaning item: " + repr(lst[i]), 4)
+ if lst[i][0] == u"\n":
+ lst[i] == lst[i][1:]
+ if lst[i][len(lst) -1] == u"\n":
+ lst[i] == lst[i][:len(lst)- 2]
+ lst[i] = lst[i].strip()
+
+ if values or evaluate:
+ for i in range(0, len(lst)):
+ log("Getting values %s" % lst[i])
+ if function:
+ if evaluate: # include the ( ) for evaluation
+ data = re.compile("(\(.*?\))", re.M | re.S).findall(lst[i])
+ else:
+ data = re.compile("\((.*?)\)", re.M | re.S).findall(lst[i])
+ elif variable:
+ tlst = re.compile(variable +".*?=.*?;", re.M | re.S).findall(lst[i])
+ data = []
+ for tmp in tlst: # This breaks for some stuff. "ad_tag": "http://ad-emea.doubleclick.net/N4061/pfadx/com.ytpwatch.entertainment/main_563326'' # ends early, must end with }
+ cont_char = tmp[0]
+ cont_char = tmp[tmp.find("=") + 1:].strip()
+ cont_char = cont_char[0]
+ if cont_char in "'\"":
+ log("Using %s as quotation mark" % cont_char, 1)
+ tmp = tmp[tmp.find(cont_char) + 1:tmp.rfind(cont_char)]
+ else:
+ log("No quotation mark found", 1)
+ tmp = tmp[tmp.find("=") + 1: tmp.rfind(";")]
+
+ tmp = tmp.strip()
+ if len(tmp) > 0:
+ data.append(tmp)
+ else:
+ log("ERROR: Don't know what to extract values from")
+
+ log("Values extracted: %s" % repr(data))
+ if len(data) > 0:
+ lst[i] = data[0]
+
+ if evaluate:
+ for i in range(0, len(lst)):
+ log("Evaluating %s" % lst[i])
+ data = lst[i].strip()
+ try:
+ try:
+ lst[i] = json.loads(data)
+ except:
+ log("Couldn't json.loads, trying eval")
+ lst[i] = eval(data)
+ except:
+ log("Couldn't eval: %s from %s" % (repr(data), repr(lst[i])))
+
+ log("Done: " + str(len(lst)))
+ return lst
+
+def fetchPage(params={}):
+ get = params.get
+ link = get("link")
+ ret_obj = {}
+ if get("post_data"):
+ log("called for : " + repr(params['link']))
+ else:
+ log("called for : " + repr(params))
+
+ if not link or int(get("error", "0")) > 2:
+ log("giving up")
+ ret_obj["status"] = 500
+ return ret_obj
+
+ if get("post_data"):
+ if get("hide_post_data"):
+ log("Posting data", 2)
+ else:
+ log("Posting data: " + urllib.urlencode(get("post_data")), 2)
+
+ request = urllib2.Request(link, urllib.urlencode(get("post_data")))
+ request.add_header('Content-Type', 'application/x-www-form-urlencoded')
+ else:
+ log("Got request", 2)
+ request = urllib2.Request(link)
+
+ if get("headers"):
+ for head in get("headers"):
+ request.add_header(head[0], head[1])
+
+ request.add_header('User-Agent', USERAGENT)
+
+ if get("cookie"):
+ request.add_header('Cookie', get("cookie"))
+
+ if get("refering"):
+ request.add_header('Referer', get("refering"))
+
+ try:
+ log("connecting to server...", 1)
+
+ con = urllib2.urlopen(request)
+ ret_obj["header"] = con.info()
+ ret_obj["new_url"] = con.geturl()
+ if get("no-content", "false") == u"false" or get("no-content", "false") == "false":
+ inputdata = con.read()
+ #data_type = chardet.detect(inputdata)
+ #inputdata = inputdata.decode(data_type["encoding"])
+ ret_obj["content"] = inputdata.decode("utf-8")
+
+ con.close()
+
+ log("Done")
+ ret_obj["status"] = 200
+ return ret_obj
+
+ except urllib2.HTTPError, e:
+ err = str(e)
+ log("HTTPError : " + err)
+ log("HTTPError - Headers: " + str(e.headers) + " - Content: " + e.fp.read())
+
+ params["error"] = str(int(get("error", "0")) + 1)
+ ret = fetchPage(params)
+
+ if not "content" in ret and e.fp:
+ ret["content"] = e.fp.read()
+ return ret
+
+ ret_obj["status"] = 500
+ return ret_obj
+
+ except urllib2.URLError, e:
+ err = str(e)
+ log("URLError : " + err)
+
+ time.sleep(3)
+ params["error"] = str(int(get("error", "0")) + 1)
+ ret_obj = fetchPage(params)
+ return ret_obj
+
+
+def getCookieInfoAsHTML():
+ log("", 5)
+ if hasattr(sys.modules["__main__"], "cookiejar"):
+ cookiejar = sys.modules["__main__"].cookiejar
+
+ cookie = repr(cookiejar)
+ cookie = cookie.replace("<_LWPCookieJar.LWPCookieJar[", "")
+ cookie = cookie.replace("), Cookie(version=0,", ">", "> ")
+ cookie = cookie.replace("Cookie(version=0,", "= 0x02050000:
+ # return data
+
+ try:
+ return data.encode('ascii', "ignore")
+ except:
+ log("Hit except on : " + repr(data))
+ s = u""
+ for i in data:
+ try:
+ i.encode("ascii", "ignore")
+ except:
+ log("Can't convert character", 4)
+ continue
+ else:
+ s += i
+
+ log(repr(s), 5)
+ return s
+
+
+# This function handles stupid utf handling in python.
+def makeUTF8(data):
+ log(repr(data), 5)
+ return data
+ try:
+ return data.decode('utf8', 'xmlcharrefreplace') # was 'ignore'
+ except:
+ log("Hit except on : " + repr(data))
+ s = u""
+ for i in data:
+ try:
+ i.decode("utf8", "xmlcharrefreplace")
+ except:
+ log("Can't convert character", 4)
+ continue
+ else:
+ s += i
+ log(repr(s), 5)
+ return s
+
+
+def openFile(filepath, options=u"r"):
+ log(repr(filepath) + " - " + repr(options))
+ if options.find("b") == -1: # Toggle binary mode on failure
+ alternate = options + u"b"
+ else:
+ alternate = options.replace(u"b", u"")
+
+ try:
+ log("Trying normal: %s" % options)
+ return io.open(filepath, options)
+ except:
+ log("Fallback to binary: %s" % alternate)
+ return io.open(filepath, alternate)
+
+
+def log(description, level=0):
+ if dbg and dbglevel > level:
+ try:
+ xbmc.log((u"[%s] %s : '%s'" % (plugin, inspect.stack()[1][3], description)).decode("utf-8"), xbmc.LOGNOTICE)
+ except:
+ xbmc.log(u"FALLBACK [%s] %s : '%s'" % (plugin, inspect.stack()[1][3], repr(description)), xbmc.LOGNOTICE)
diff --git a/.install/.kodi/addons/script.module.parsedom/lib/CommonFunctions.pyo b/.install/.kodi/addons/script.module.parsedom/lib/CommonFunctions.pyo
new file mode 100644
index 000000000..756b41a36
Binary files /dev/null and b/.install/.kodi/addons/script.module.parsedom/lib/CommonFunctions.pyo differ
diff --git a/.install/.kodi/addons/script.module.pylast/LICENSE.md b/.install/.kodi/addons/script.module.pylast/LICENSE.md
new file mode 100644
index 000000000..8dada3eda
--- /dev/null
+++ b/.install/.kodi/addons/script.module.pylast/LICENSE.md
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/.install/.kodi/addons/script.module.pylast/README.md b/.install/.kodi/addons/script.module.pylast/README.md
new file mode 100644
index 000000000..d20a717df
--- /dev/null
+++ b/.install/.kodi/addons/script.module.pylast/README.md
@@ -0,0 +1,103 @@
+pyLast
+======
+
+[![Build status](https://travis-ci.org/pylast/pylast.svg?branch=develop)](https://travis-ci.org/pylast/pylast)
+[![PyPI version](https://img.shields.io/pypi/v/pylast.svg)](https://pypi.python.org/pypi/pylast/)
+[![PyPI downloads](https://img.shields.io/pypi/dm/pylast.svg)](https://pypi.python.org/pypi/pylast/)
+[![Coverage (Codecov)](https://codecov.io/gh/pylast/pylast/branch/develop/graph/badge.svg)](https://codecov.io/gh/pylast/pylast)
+[![Coverage (Coveralls)](https://coveralls.io/repos/github/pylast/pylast/badge.svg?branch=develop)](https://coveralls.io/github/pylast/pylast?branch=develop)
+[![Code health](https://landscape.io/github/pylast/pylast/develop/landscape.svg)](https://landscape.io/github/pylast/pylast/develop)
+
+
+A Python interface to [Last.fm](http://www.last.fm/) and other API-compatible websites such as [Libre.fm](http://libre.fm/).
+
+Try using the pydoc utility for help on usage or see [test_pylast.py](tests/test_pylast.py) for examples.
+
+Installation
+------------
+
+Install via pip:
+
+ pip install pylast
+
+
+Features
+--------
+
+ * Simple public interface.
+ * Access to all the data exposed by the Last.fm web services.
+ * Scrobbling support.
+ * Full object-oriented design.
+ * Proxy support.
+ * Internal caching support for some web services calls (disabled by default).
+ * Support for other API-compatible networks like Libre.fm.
+ * Python 3-friendly (Starting from 0.5).
+
+
+Getting Started
+---------------
+
+Here's a simple code example to get you started. In order to create any object from pyLast, you need a `Network` object which represents a social music network that is Last.fm or any other API-compatible one. You can obtain a pre-configured one for Last.fm and use it as follows:
+
+```python
+import pylast
+
+# You have to have your own unique two values for API_KEY and API_SECRET
+# Obtain yours from http://www.last.fm/api/account/create for Last.fm
+API_KEY = "b25b959554ed76058ac220b7b2e0a026" # this is a sample key
+API_SECRET = "425b55975eed76058ac220b7b4e8a054"
+
+# In order to perform a write operation you need to authenticate yourself
+username = "your_user_name"
+password_hash = pylast.md5("your_password")
+
+network = pylast.LastFMNetwork(api_key=API_KEY, api_secret=API_SECRET,
+ username=username, password_hash=password_hash)
+
+# Now you can use that object everywhere
+artist = network.get_artist("System of a Down")
+artist.shout("<3")
+
+
+track = network.get_track("Iron Maiden", "The Nomad")
+track.love()
+track.add_tags(("awesome", "favorite"))
+
+# Type help(pylast.LastFMNetwork) or help(pylast) in a Python interpreter
+# to get more help about anything and see examples of how it works
+```
+
+More examples in hugovk/lastfm-tools and [tests/test_pylast.py](tests/test_pylast.py).
+
+Testing
+-------
+
+[tests/test_pylast.py](tests/test_pylast.py) contains integration tests with Last.fm, and plenty of code examples. Unit tests are also in the [tests/](tests/) directory.
+
+For integration tests you need a test account at Last.fm that will become cluttered with test data, and an API key and secret. Either copy [example_test_pylast.yaml](example_test_pylast.yaml) to test_pylast.yaml and fill out the credentials, or set them as environment variables like:
+
+```sh
+export PYLAST_USERNAME=TODO_ENTER_YOURS_HERE
+export PYLAST_PASSWORD_HASH=TODO_ENTER_YOURS_HERE
+export PYLAST_API_KEY=TODO_ENTER_YOURS_HERE
+export PYLAST_API_SECRET=TODO_ENTER_YOURS_HERE
+```
+
+To run all unit and integration tests:
+```sh
+pip install pytest flaky mock
+py.test
+```
+
+Or run just one test case:
+```sh
+py.test -k test_scrobble
+```
+
+To run with coverage:
+```sh
+py.test -v --cov pylast --cov-report term-missing
+coverage report # for command-line report
+coverage html # for HTML report
+open htmlcov/index.html
+```
diff --git a/.install/.kodi/addons/script.module.pylast/addon.xml b/.install/.kodi/addons/script.module.pylast/addon.xml
new file mode 100644
index 000000000..24885c63f
--- /dev/null
+++ b/.install/.kodi/addons/script.module.pylast/addon.xml
@@ -0,0 +1,21 @@
+
+
+
+
+
+
+
+
+ A Python interface to Last.fm and Libre.fm
+ A Python interface to Last.fm and Libre.fm
+
+ https://pypi.python.org/pypi/pylast
+ Apache2
+ all
+ https://github.com/pylast/pylast
+
+
+ icon.png
+
+
+
diff --git a/.install/.kodi/addons/script.module.pylast/icon.png b/.install/.kodi/addons/script.module.pylast/icon.png
new file mode 100644
index 000000000..ca4c53adb
Binary files /dev/null and b/.install/.kodi/addons/script.module.pylast/icon.png differ
diff --git a/.install/.kodi/addons/script.module.pylast/lib/pylast/__init__.py b/.install/.kodi/addons/script.module.pylast/lib/pylast/__init__.py
new file mode 100644
index 000000000..e533fde29
--- /dev/null
+++ b/.install/.kodi/addons/script.module.pylast/lib/pylast/__init__.py
@@ -0,0 +1,4614 @@
+# -*- coding: utf-8 -*-
+#
+# pylast -
+# A Python interface to Last.fm and Libre.fm
+#
+# Copyright 2008-2010 Amr Hassan
+# Copyright 2013-2017 hugovk
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# https://github.com/pylast/pylast
+
+import hashlib
+from xml.dom import minidom, Node
+import xml.dom
+import time
+import shelve
+import tempfile
+import sys
+import collections
+import warnings
+import re
+import six
+
+__version__ = '1.8.0'
+__author__ = 'Amr Hassan, hugovk'
+__copyright__ = "Copyright (C) 2008-2010 Amr Hassan, 2013-2017 hugovk"
+__license__ = "apache2"
+__email__ = 'amr.hassan@gmail.com'
+
+
def _deprecation_warning(message):
    """Emit *message* through the warnings machinery as a DeprecationWarning."""
    warnings.warn(message, DeprecationWarning)
+
+
def _can_use_ssl_securely():
    """Return True when this interpreter can do verified SSL.

    Python >= 3.4 and 2.7.9+ ship sane SSL defaults; 3.3 lacks
    create_default_context() but can be configured manually; anything
    older never verified certificates, so SSL is not used there.
    """
    version = sys.version_info
    modern_py3 = version > (3, 3)
    patched_py2 = (2, 7, 9) < version < (3, 0)
    return modern_py3 or patched_py2
+
+
+if _can_use_ssl_securely():
+ import ssl
+
+if sys.version_info[0] == 3:
+ if _can_use_ssl_securely():
+ from http.client import HTTPSConnection
+ else:
+ from http.client import HTTPConnection
+ import html.entities as htmlentitydefs
+ from urllib.parse import splithost as url_split_host
+ from urllib.parse import quote_plus as url_quote_plus
+
+ unichr = chr
+
+elif sys.version_info[0] == 2:
+ if _can_use_ssl_securely():
+ from httplib import HTTPSConnection
+ else:
+ from httplib import HTTPConnection
+ import htmlentitydefs
+ from urllib import splithost as url_split_host
+ from urllib import quote_plus as url_quote_plus
+
# Error codes returned by the web service in a failed response.
STATUS_INVALID_SERVICE = 2
STATUS_INVALID_METHOD = 3
STATUS_AUTH_FAILED = 4
STATUS_INVALID_FORMAT = 5
STATUS_INVALID_PARAMS = 6
STATUS_INVALID_RESOURCE = 7
STATUS_TOKEN_ERROR = 8
STATUS_INVALID_SK = 9
STATUS_INVALID_API_KEY = 10
STATUS_OFFLINE = 11
STATUS_SUBSCRIBERS_ONLY = 12
STATUS_INVALID_SIGNATURE = 13
STATUS_TOKEN_UNAUTHORIZED = 14
STATUS_TOKEN_EXPIRED = 15

# Event attendance statuses (string values as sent to the API).
EVENT_ATTENDING = '0'
EVENT_MAYBE_ATTENDING = '1'
EVENT_NOT_ATTENDING = '2'

# Chart period identifiers accepted by the *.getTop* services.
PERIOD_OVERALL = 'overall'
PERIOD_7DAYS = '7day'
PERIOD_1MONTH = '1month'
PERIOD_3MONTHS = '3month'
PERIOD_6MONTHS = '6month'
PERIOD_12MONTHS = '12month'

# Keys into a network's domain_names mapping (one website per language).
DOMAIN_ENGLISH = 0
DOMAIN_GERMAN = 1
DOMAIN_SPANISH = 2
DOMAIN_FRENCH = 3
DOMAIN_ITALIAN = 4
DOMAIN_POLISH = 5
DOMAIN_PORTUGUESE = 6
DOMAIN_SWEDISH = 7
DOMAIN_TURKISH = 8
DOMAIN_RUSSIAN = 9
DOMAIN_JAPANESE = 10
DOMAIN_CHINESE = 11

# Cover-art size indices.
COVER_SMALL = 0
COVER_MEDIUM = 1
COVER_LARGE = 2
COVER_EXTRA_LARGE = 3
COVER_MEGA = 4

# Sort orders for image lists.
IMAGES_ORDER_POPULARITY = "popularity"
IMAGES_ORDER_DATE = "dateadded"


USER_MALE = 'Male'
USER_FEMALE = 'Female'

# Scrobble "source" codes per the submissions protocol.
SCROBBLE_SOURCE_USER = "P"
SCROBBLE_SOURCE_NON_PERSONALIZED_BROADCAST = "R"
SCROBBLE_SOURCE_PERSONALIZED_BROADCAST = "E"
SCROBBLE_SOURCE_LASTFM = "L"
SCROBBLE_SOURCE_UNKNOWN = "U"

# Scrobble "rating"/mode codes per the submissions protocol.
SCROBBLE_MODE_PLAYED = ""
SCROBBLE_MODE_LOVED = "L"
SCROBBLE_MODE_BANNED = "B"
SCROBBLE_MODE_SKIPPED = "S"

# Pattern matching characters that may not appear in well-formed XML
# (control chars and unpaired/misordered UTF-16 surrogates).
# From http://boodebr.org/main/python/all-about-python-and-unicode#UNI_XML
RE_XML_ILLEGAL = (u'([\u0000-\u0008\u000b-\u000c\u000e-\u001f\ufffe-\uffff])' +
                  u'|' +
                  u'([%s-%s][^%s-%s])|([^%s-%s][%s-%s])|([%s-%s]$)|(^[%s-%s])'
                  %
                  (unichr(0xd800), unichr(0xdbff), unichr(0xdc00),
                   unichr(0xdfff), unichr(0xd800), unichr(0xdbff),
                   unichr(0xdc00), unichr(0xdfff), unichr(0xd800),
                   unichr(0xdbff), unichr(0xdc00), unichr(0xdfff)))

XML_ILLEGAL = re.compile(RE_XML_ILLEGAL)
+
# Build the module-wide SSL context used for HTTPS requests.
# NOTE: SSL_CONTEXT is only defined on SSL-capable interpreters (3.3+,
# 2.7.9+); on older Pythons the name is left unset and plain HTTP is used.
# Python <=3.3 doesn't support create_default_context()
# <2.7.9 and <3.2 never did any SSL verification
# FIXME This can be removed after 2017-09 when 3.3 is no longer supported and
# pypy3 uses 3.4 or later, see
# https://en.wikipedia.org/wiki/CPython#Version_history
if sys.version_info[0] == 3 and sys.version_info[1] == 3:
    import certifi
    SSL_CONTEXT = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    SSL_CONTEXT.verify_mode = ssl.CERT_REQUIRED
    SSL_CONTEXT.options |= ssl.OP_NO_COMPRESSION
    # Intermediate from https://wiki.mozilla.org/Security/Server_Side_TLS
    # Create the cipher string
    cipher_string = """
    ECDHE-ECDSA-CHACHA20-POLY1305
    ECDHE-RSA-CHACHA20-POLY1305
    ECDHE-ECDSA-AES128-GCM-SHA256
    ECDHE-RSA-AES128-GCM-SHA256
    ECDHE-ECDSA-AES256-GCM-SHA384
    ECDHE-RSA-AES256-GCM-SHA384
    DHE-RSA-AES128-GCM-SHA256
    DHE-RSA-AES256-GCM-SHA384
    ECDHE-ECDSA-AES128-SHA256
    ECDHE-RSA-AES128-SHA256
    ECDHE-ECDSA-AES128-SHA
    ECDHE-RSA-AES256-SHA384
    ECDHE-RSA-AES128-SHA
    ECDHE-ECDSA-AES256-SHA384
    ECDHE-ECDSA-AES256-SHA
    ECDHE-RSA-AES256-SHA
    DHE-RSA-AES128-SHA256
    DHE-RSA-AES128-SHA
    DHE-RSA-AES256-SHA256
    DHE-RSA-AES256-SHA
    ECDHE-ECDSA-DES-CBC3-SHA
    ECDHE-RSA-DES-CBC3-SHA
    EDH-RSA-DES-CBC3-SHA
    AES128-GCM-SHA256
    AES256-GCM-SHA384
    AES128-SHA256
    AES256-SHA256
    AES128-SHA
    AES256-SHA
    DES-CBC3-SHA
    !DSS
    """
    # Whitespace-separated list -> OpenSSL colon-free cipher string form.
    cipher_string = ' '.join(cipher_string.split())
    SSL_CONTEXT.set_ciphers(cipher_string)
    SSL_CONTEXT.load_verify_locations(certifi.where())

# Python >3.4 and >2.7.9 has sane defaults
elif sys.version_info > (3, 4) or ((2, 7, 9) < sys.version_info < (3, 0)):
    SSL_CONTEXT = ssl.create_default_context()
+
+
+class _Network(object):
+ """
+ A music social network website such as Last.fm or
+ one with a Last.fm-compatible API.
+ """
+
    def __init__(
            self, name, homepage, ws_server, api_key, api_secret, session_key,
            submission_server, username, password_hash, domain_names, urls,
            token=None):
        """
        name: the name of the network
        homepage: the homepage URL
        ws_server: the URL of the webservices server
        api_key: a provided API_KEY
        api_secret: a provided API_SECRET
        session_key: a generated session_key or None
        submission_server: the URL of the server to which tracks are
            submitted (scrobbled)
        username: a username of a valid user
        password_hash: the output of pylast.md5(password) where password is
            the user's password
        domain_names: a dict mapping each DOMAIN_* value to a string domain
            name
        urls: a dict mapping types to URLs
        token: an authentication token to retrieve a session

        if username and password_hash were provided and not session_key,
        session_key will be generated automatically when needed.

        Either a valid session_key or a combination of username and
        password_hash must be present for scrobbling.

        You should use a preconfigured network object through a
        get_*_network(...) method instead of creating an object
        of this class, unless you know what you're doing.
        """

        self.name = name
        self.homepage = homepage
        self.ws_server = ws_server
        self.api_key = api_key
        self.api_secret = api_secret
        self.session_key = session_key
        self.submission_server = submission_server
        self.username = username
        self.password_hash = password_hash
        self.domain_names = domain_names
        self.urls = urls

        # Per-instance mutable state for caching, proxying and rate limiting;
        # all disabled by default and toggled via enable_*/disable_* methods.
        self.cache_backend = None
        self.proxy_enabled = False
        self.proxy = None
        self.last_call_time = 0
        self.limit_rate = False

        # Load session_key from authentication token if provided
        if token and not self.session_key:
            sk_gen = SessionKeyGenerator(self)
            self.session_key = sk_gen.get_web_auth_session_key(
                url=None, token=token)

        # Generate a session_key if necessary
        # (requires full credentials; performs a web service call).
        if ((self.api_key and self.api_secret) and not self.session_key and
                (self.username and self.password_hash)):
            sk_gen = SessionKeyGenerator(self)
            self.session_key = sk_gen.get_session_key(
                self.username, self.password_hash)
+
+ def __str__(self):
+ return "%s Network" % self.name
+
+ def get_artist(self, artist_name):
+ """
+ Return an Artist object
+ """
+
+ return Artist(artist_name, self)
+
+ def get_track(self, artist, title):
+ """
+ Return a Track object
+ """
+
+ return Track(artist, title, self)
+
+ def get_album(self, artist, title):
+ """
+ Return an Album object
+ """
+
+ return Album(artist, title, self)
+
+ def get_authenticated_user(self):
+ """
+ Returns the authenticated user
+ """
+
+ return AuthenticatedUser(self)
+
+ def get_country(self, country_name):
+ """
+ Returns a country object
+ """
+
+ return Country(country_name, self)
+
+ def get_metro(self, metro_name, country_name):
+ """
+ Returns a metro object
+ """
+
+ return Metro(metro_name, country_name, self)
+
+ def get_group(self, name):
+ """
+ Returns a Group object
+ """
+
+ return Group(name, self)
+
+ def get_user(self, username):
+ """
+ Returns a user object
+ """
+
+ return User(username, self)
+
+ def get_tag(self, name):
+ """
+ Returns a tag object
+ """
+
+ return Tag(name, self)
+
+ def get_scrobbler(self, client_id, client_version):
+ """
+ Returns a Scrobbler object used for submitting tracks to the server
+
+ Quote from http://www.last.fm/api/submissions:
+ ========
+ Client identifiers are used to provide a centrally managed database
+ of the client versions, allowing clients to be banned if they are
+ found to be behaving undesirably. The client ID is associated with
+ a version number on the server, however these are only incremented
+ if a client is banned and do not have to reflect the version of the
+ actual client application.
+
+ During development, clients which have not been allocated an
+ identifier should use the identifier tst, with a version number of
+ 1.0. Do not distribute code or client implementations which use
+ this test identifier. Do not use the identifiers used by other
+ clients.
+ =========
+
+ To obtain a new client identifier please contact:
+ * Last.fm: submissions@last.fm
+ * # TODO: list others
+
+ ...and provide us with the name of your client and its homepage
+ address.
+ """
+
+ _deprecation_warning(
+ "Use _Network.scrobble(...), _Network.scrobble_many(...),"
+ " and Network.update_now_playing(...) instead")
+
+ return Scrobbler(self, client_id, client_version)
+
+ def _get_language_domain(self, domain_language):
+ """
+ Returns the mapped domain name of the network to a DOMAIN_* value
+ """
+
+ if domain_language in self.domain_names:
+ return self.domain_names[domain_language]
+
+ def _get_url(self, domain, url_type):
+ return "http://%s/%s" % (
+ self._get_language_domain(domain), self.urls[url_type])
+
+ def _get_ws_auth(self):
+ """
+ Returns an (API_KEY, API_SECRET, SESSION_KEY) tuple.
+ """
+ return (self.api_key, self.api_secret, self.session_key)
+
+ def _delay_call(self):
+ """
+ Makes sure that web service calls are at least 0.2 seconds apart.
+ """
+
+ # Delay time in seconds from section 4.4 of http://www.last.fm/api/tos
+ DELAY_TIME = 0.2
+ now = time.time()
+
+ time_since_last = now - self.last_call_time
+
+ if time_since_last < DELAY_TIME:
+ time.sleep(DELAY_TIME - time_since_last)
+
+ self.last_call_time = now
+
+ def create_new_playlist(self, title, description):
+ """
+ Creates a playlist for the authenticated user and returns it
+ title: The title of the new playlist.
+ description: The description of the new playlist.
+ """
+
+ params = {}
+ params['title'] = title
+ params['description'] = description
+
+ doc = _Request(self, 'playlist.create', params).execute(False)
+
+ e_id = doc.getElementsByTagName("id")[0].firstChild.data
+ user = doc.getElementsByTagName('playlists')[0].getAttribute('user')
+
+ return Playlist(user, e_id, self)
+
+ def get_top_artists(self, limit=None, cacheable=True):
+ """Returns the most played artists as a sequence of TopItem objects."""
+
+ params = {}
+ if limit:
+ params["limit"] = limit
+
+ doc = _Request(self, "chart.getTopArtists", params).execute(cacheable)
+
+ return _extract_top_artists(doc, self)
+
+ def get_top_tracks(self, limit=None, cacheable=True):
+ """Returns the most played tracks as a sequence of TopItem objects."""
+
+ params = {}
+ if limit:
+ params["limit"] = limit
+
+ doc = _Request(self, "chart.getTopTracks", params).execute(cacheable)
+
+ seq = []
+ for node in doc.getElementsByTagName("track"):
+ title = _extract(node, "name")
+ artist = _extract(node, "name", 1)
+ track = Track(artist, title, self)
+ weight = _number(_extract(node, "playcount"))
+ seq.append(TopItem(track, weight))
+
+ return seq
+
+ def get_top_tags(self, limit=None, cacheable=True):
+ """Returns the most used tags as a sequence of TopItem objects."""
+
+ # Last.fm has no "limit" parameter for tag.getTopTags
+ # so we need to get all (250) and then limit locally
+ doc = _Request(self, "tag.getTopTags").execute(cacheable)
+
+ seq = []
+ for node in doc.getElementsByTagName("tag"):
+ if limit and len(seq) >= limit:
+ break
+ tag = Tag(_extract(node, "name"), self)
+ weight = _number(_extract(node, "count"))
+ seq.append(TopItem(tag, weight))
+
+ return seq
+
+ def get_geo_events(
+ self, longitude=None, latitude=None, location=None, distance=None,
+ tag=None, festivalsonly=None, limit=None, cacheable=True):
+ """
+ Returns all events in a specific location by country or city name.
+ Parameters:
+ longitude (Optional) : Specifies a longitude value to retrieve events
+ for (service returns nearby events by default)
+ latitude (Optional) : Specifies a latitude value to retrieve events for
+ (service returns nearby events by default)
+ location (Optional) : Specifies a location to retrieve events for
+ (service returns nearby events by default)
+ distance (Optional) : Find events within a specified radius
+ (in kilometres)
+ tag (Optional) : Specifies a tag to filter by.
+ festivalsonly[0|1] (Optional) : Whether only festivals should be
+ returned, or all events.
+ limit (Optional) : The number of results to fetch per page.
+ Defaults to 10.
+ """
+
+ params = {}
+
+ if longitude:
+ params["long"] = longitude
+ if latitude:
+ params["lat"] = latitude
+ if location:
+ params["location"] = location
+ if limit:
+ params["limit"] = limit
+ if distance:
+ params["distance"] = distance
+ if tag:
+ params["tag"] = tag
+ if festivalsonly:
+ params["festivalsonly"] = 1
+ elif not festivalsonly:
+ params["festivalsonly"] = 0
+
+ doc = _Request(self, "geo.getEvents", params).execute(cacheable)
+
+ return _extract_events_from_doc(doc, self)
+
+ def get_metro_weekly_chart_dates(self, cacheable=True):
+ """
+ Returns a list of From and To tuples for the available metro charts.
+ """
+
+ doc = _Request(self, "geo.getMetroWeeklyChartlist").execute(cacheable)
+
+ seq = []
+ for node in doc.getElementsByTagName("chart"):
+ seq.append((node.getAttribute("from"), node.getAttribute("to")))
+
+ return seq
+
+ def get_metros(self, country=None, cacheable=True):
+ """
+ Get a list of valid countries and metros for use in the other
+ webservices.
+ Parameters:
+ country (Optional) : Optionally restrict the results to those Metros
+ from a particular country, as defined by the ISO 3166-1 country
+ names standard.
+ """
+ params = {}
+
+ if country:
+ params["country"] = country
+
+ doc = _Request(self, "geo.getMetros", params).execute(cacheable)
+
+ metros = doc.getElementsByTagName("metro")
+ seq = []
+
+ for metro in metros:
+ name = _extract(metro, "name")
+ country = _extract(metro, "country")
+
+ seq.append(Metro(name, country, self))
+
+ return seq
+
+ def get_geo_top_artists(self, country, limit=None, cacheable=True):
+ """Get the most popular artists on Last.fm by country.
+ Parameters:
+ country (Required) : A country name, as defined by the ISO 3166-1
+ country names standard.
+ limit (Optional) : The number of results to fetch per page.
+ Defaults to 50.
+ """
+ params = {"country": country}
+
+ if limit:
+ params["limit"] = limit
+
+ doc = _Request(self, "geo.getTopArtists", params).execute(cacheable)
+
+ return _extract_top_artists(doc, self)
+
+ def get_geo_top_tracks(
+ self, country, location=None, limit=None, cacheable=True):
+ """Get the most popular tracks on Last.fm last week by country.
+ Parameters:
+ country (Required) : A country name, as defined by the ISO 3166-1
+ country names standard
+ location (Optional) : A metro name, to fetch the charts for
+ (must be within the country specified)
+ limit (Optional) : The number of results to fetch per page.
+ Defaults to 50.
+ """
+ params = {"country": country}
+
+ if location:
+ params["location"] = location
+ if limit:
+ params["limit"] = limit
+
+ doc = _Request(self, "geo.getTopTracks", params).execute(cacheable)
+
+ tracks = doc.getElementsByTagName("track")
+ seq = []
+
+ for track in tracks:
+ title = _extract(track, "name")
+ artist = _extract(track, "name", 1)
+ listeners = _extract(track, "listeners")
+
+ seq.append(TopItem(Track(artist, title, self), listeners))
+
+ return seq
+
+ def enable_proxy(self, host, port):
+ """Enable a default web proxy"""
+
+ self.proxy = [host, _number(port)]
+ self.proxy_enabled = True
+
+ def disable_proxy(self):
+ """Disable using the web proxy"""
+
+ self.proxy_enabled = False
+
+ def is_proxy_enabled(self):
+ """Returns True if a web proxy is enabled."""
+
+ return self.proxy_enabled
+
+ def _get_proxy(self):
+ """Returns proxy details."""
+
+ return self.proxy
+
+ def enable_rate_limit(self):
+ """Enables rate limiting for this network"""
+ self.limit_rate = True
+
+ def disable_rate_limit(self):
+ """Disables rate limiting for this network"""
+ self.limit_rate = False
+
+ def is_rate_limited(self):
+ """Return True if web service calls are rate limited"""
+ return self.limit_rate
+
+ def enable_caching(self, file_path=None):
+ """Enables caching request-wide for all cacheable calls.
+
+ * file_path: A file path for the backend storage file. If
+ None set, a temp file would probably be created, according the backend.
+ """
+
+ if not file_path:
+ file_path = tempfile.mktemp(prefix="pylast_tmp_")
+
+ self.cache_backend = _ShelfCacheBackend(file_path)
+
+ def disable_caching(self):
+ """Disables all caching features."""
+
+ self.cache_backend = None
+
+ def is_caching_enabled(self):
+ """Returns True if caching is enabled."""
+
+ return not (self.cache_backend is None)
+
+ def _get_cache_backend(self):
+
+ return self.cache_backend
+
+ def search_for_album(self, album_name):
+ """Searches for an album by its name. Returns a AlbumSearch object.
+ Use get_next_page() to retrieve sequences of results."""
+
+ return AlbumSearch(album_name, self)
+
+ def search_for_artist(self, artist_name):
+ """Searches of an artist by its name. Returns a ArtistSearch object.
+ Use get_next_page() to retrieve sequences of results."""
+
+ return ArtistSearch(artist_name, self)
+
+ def search_for_tag(self, tag_name):
+ """Searches of a tag by its name. Returns a TagSearch object.
+ Use get_next_page() to retrieve sequences of results."""
+
+ return TagSearch(tag_name, self)
+
+ def search_for_track(self, artist_name, track_name):
+ """Searches of a track by its name and its artist. Set artist to an
+ empty string if not available.
+ Returns a TrackSearch object.
+ Use get_next_page() to retrieve sequences of results."""
+
+ return TrackSearch(artist_name, track_name, self)
+
+ def search_for_venue(self, venue_name, country_name):
+ """Searches of a venue by its name and its country. Set country_name to
+ an empty string if not available.
+ Returns a VenueSearch object.
+ Use get_next_page() to retrieve sequences of results."""
+
+ return VenueSearch(venue_name, country_name, self)
+
+ def get_track_by_mbid(self, mbid):
+ """Looks up a track by its MusicBrainz ID"""
+
+ params = {"mbid": mbid}
+
+ doc = _Request(self, "track.getInfo", params).execute(True)
+
+ return Track(_extract(doc, "name", 1), _extract(doc, "name"), self)
+
+ def get_artist_by_mbid(self, mbid):
+ """Loooks up an artist by its MusicBrainz ID"""
+
+ params = {"mbid": mbid}
+
+ doc = _Request(self, "artist.getInfo", params).execute(True)
+
+ return Artist(_extract(doc, "name"), self)
+
+ def get_album_by_mbid(self, mbid):
+ """Looks up an album by its MusicBrainz ID"""
+
+ params = {"mbid": mbid}
+
+ doc = _Request(self, "album.getInfo", params).execute(True)
+
+ return Album(_extract(doc, "artist"), _extract(doc, "name"), self)
+
+ def update_now_playing(
+ self, artist, title, album=None, album_artist=None,
+ duration=None, track_number=None, mbid=None, context=None):
+ """
+ Used to notify Last.fm that a user has started listening to a track.
+
+ Parameters:
+ artist (Required) : The artist name
+ title (Required) : The track title
+ album (Optional) : The album name.
+ album_artist (Optional) : The album artist - if this differs
+ from the track artist.
+ duration (Optional) : The length of the track in seconds.
+ track_number (Optional) : The track number of the track on the
+ album.
+ mbid (Optional) : The MusicBrainz Track ID.
+ context (Optional) : Sub-client version
+ (not public, only enabled for certain API keys)
+ """
+
+ params = {"track": title, "artist": artist}
+
+ if album:
+ params["album"] = album
+ if album_artist:
+ params["albumArtist"] = album_artist
+ if context:
+ params["context"] = context
+ if track_number:
+ params["trackNumber"] = track_number
+ if mbid:
+ params["mbid"] = mbid
+ if duration:
+ params["duration"] = duration
+
+ _Request(self, "track.updateNowPlaying", params).execute()
+
+ def scrobble(
+ self, artist, title, timestamp, album=None, album_artist=None,
+ track_number=None, duration=None, stream_id=None, context=None,
+ mbid=None):
+
+ """Used to add a track-play to a user's profile.
+
+ Parameters:
+ artist (Required) : The artist name.
+ title (Required) : The track name.
+ timestamp (Required) : The time the track started playing, in UNIX
+ timestamp format (integer number of seconds since 00:00:00,
+ January 1st 1970 UTC). This must be in the UTC time zone.
+ album (Optional) : The album name.
+ album_artist (Optional) : The album artist - if this differs from
+ the track artist.
+ context (Optional) : Sub-client version (not public, only enabled
+ for certain API keys)
+ stream_id (Optional) : The stream id for this track received from
+ the radio.getPlaylist service.
+ track_number (Optional) : The track number of the track on the
+ album.
+ mbid (Optional) : The MusicBrainz Track ID.
+ duration (Optional) : The length of the track in seconds.
+ """
+
+ return self.scrobble_many(({
+ "artist": artist, "title": title, "timestamp": timestamp,
+ "album": album, "album_artist": album_artist,
+ "track_number": track_number, "duration": duration,
+ "stream_id": stream_id, "context": context, "mbid": mbid},))
+
+ def scrobble_many(self, tracks):
+ """
+ Used to scrobble a batch of tracks at once. The parameter tracks is a
+ sequence of dicts per track containing the keyword arguments as if
+ passed to the scrobble() method.
+ """
+
+ tracks_to_scrobble = tracks[:50]
+ if len(tracks) > 50:
+ remaining_tracks = tracks[50:]
+ else:
+ remaining_tracks = None
+
+ params = {}
+ for i in range(len(tracks_to_scrobble)):
+
+ params["artist[%d]" % i] = tracks_to_scrobble[i]["artist"]
+ params["track[%d]" % i] = tracks_to_scrobble[i]["title"]
+
+ additional_args = (
+ "timestamp", "album", "album_artist", "context",
+ "stream_id", "track_number", "mbid", "duration")
+ args_map_to = { # so friggin lazy
+ "album_artist": "albumArtist",
+ "track_number": "trackNumber",
+ "stream_id": "streamID"}
+
+ for arg in additional_args:
+
+ if arg in tracks_to_scrobble[i] and tracks_to_scrobble[i][arg]:
+ if arg in args_map_to:
+ maps_to = args_map_to[arg]
+ else:
+ maps_to = arg
+
+ params[
+ "%s[%d]" % (maps_to, i)] = tracks_to_scrobble[i][arg]
+
+ _Request(self, "track.scrobble", params).execute()
+
+ if remaining_tracks:
+ self.scrobble_many(remaining_tracks)
+
+ def get_play_links(self, link_type, things, cacheable=True):
+ method = link_type + ".getPlaylinks"
+ params = {}
+
+ for i, thing in enumerate(things):
+ if link_type == "artist":
+ params['artist[' + str(i) + ']'] = thing
+ elif link_type == "album":
+ params['artist[' + str(i) + ']'] = thing.artist
+ params['album[' + str(i) + ']'] = thing.title
+ elif link_type == "track":
+ params['artist[' + str(i) + ']'] = thing.artist
+ params['track[' + str(i) + ']'] = thing.title
+
+ doc = _Request(self, method, params).execute(cacheable)
+
+ seq = []
+
+ for node in doc.getElementsByTagName("externalids"):
+ spotify = _extract(node, "spotify")
+ seq.append(spotify)
+
+ return seq
+
+ def get_artist_play_links(self, artists, cacheable=True):
+ return self.get_play_links("artist", artists, cacheable)
+
+ def get_album_play_links(self, albums, cacheable=True):
+ return self.get_play_links("album", albums, cacheable)
+
+ def get_track_play_links(self, tracks, cacheable=True):
+ return self.get_play_links("track", tracks, cacheable)
+
+
class LastFMNetwork(_Network):

    """A Last.fm network object.

    api_key / api_secret: credentials, see http://www.last.fm/api/account
    session_key: a previously generated session key, or ""
    username / password_hash: a valid user and pylast.md5(password)
    token: an authentication token to retrieve a session

    When username and password_hash are given without a session_key,
    one is generated automatically when needed.  Either a valid
    session_key or username+password_hash must be present for
    scrobbling; most read-only webservices need only api_key and
    api_secret.
    """

    def __init__(
            self, api_key="", api_secret="", session_key="", username="",
            password_hash="", token=""):
        domain_names = {
            DOMAIN_ENGLISH: 'www.last.fm',
            DOMAIN_GERMAN: 'www.lastfm.de',
            DOMAIN_SPANISH: 'www.lastfm.es',
            DOMAIN_FRENCH: 'www.lastfm.fr',
            DOMAIN_ITALIAN: 'www.lastfm.it',
            DOMAIN_POLISH: 'www.lastfm.pl',
            DOMAIN_PORTUGUESE: 'www.lastfm.com.br',
            DOMAIN_SWEDISH: 'www.lastfm.se',
            DOMAIN_TURKISH: 'www.lastfm.com.tr',
            DOMAIN_RUSSIAN: 'www.lastfm.ru',
            DOMAIN_JAPANESE: 'www.lastfm.jp',
            DOMAIN_CHINESE: 'cn.last.fm',
        }
        urls = {
            "album": "music/%(artist)s/%(album)s",
            "artist": "music/%(artist)s",
            "event": "event/%(id)s",
            "country": "place/%(country_name)s",
            "playlist": "user/%(user)s/library/playlists/%(appendix)s",
            "tag": "tag/%(name)s",
            "track": "music/%(artist)s/_/%(title)s",
            "group": "group/%(name)s",
            "user": "user/%(name)s",
        }
        _Network.__init__(
            self,
            name="Last.fm",
            homepage="http://last.fm",
            ws_server=("ws.audioscrobbler.com", "/2.0/"),
            api_key=api_key,
            api_secret=api_secret,
            session_key=session_key,
            submission_server="http://post.audioscrobbler.com:80/",
            username=username,
            password_hash=password_hash,
            token=token,
            domain_names=domain_names,
            urls=urls,
        )

    def __repr__(self):
        """Quoted-credential repr, e.g. pylast.LastFMNetwork('k', ...)."""
        credentials = (self.api_key, self.api_secret, self.session_key,
                       self.username, self.password_hash)
        return "pylast.LastFMNetwork(%s)" % ", ".join(
            "'%s'" % value for value in credentials)
+
+
def get_lastfm_network(
        api_key="", api_secret="", session_key="", username="",
        password_hash="", token=""):
    """Deprecated factory: return a preconfigured Last.fm network.

    Prefer constructing LastFMNetwork directly.  Arguments match
    LastFMNetwork: api_key/api_secret credentials, an optional
    session_key or token, and username plus pylast.md5(password) as
    password_hash for scrobbling.  Most read-only webservices need only
    api_key and api_secret (see http://www.last.fm/api/account).
    """
    _deprecation_warning("Create a LastFMNetwork object instead")

    return LastFMNetwork(
        api_key, api_secret, session_key, username, password_hash, token)
+
+
class LibreFMNetwork(_Network):
    """
    A preconfigured _Network object for Libre.fm

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.
    """

    def __init__(
            self, api_key="", api_secret="", session_key="", username="",
            password_hash=""):
        # Libre.fm serves every language from the same hostname, so every
        # domain constant maps to the same value.
        domain_names = dict(
            (domain, "libre.fm") for domain in (
                DOMAIN_ENGLISH, DOMAIN_GERMAN, DOMAIN_SPANISH, DOMAIN_FRENCH,
                DOMAIN_ITALIAN, DOMAIN_POLISH, DOMAIN_PORTUGUESE,
                DOMAIN_SWEDISH, DOMAIN_TURKISH, DOMAIN_RUSSIAN,
                DOMAIN_JAPANESE, DOMAIN_CHINESE))

        _Network.__init__(
            self,
            name="Libre.fm",
            homepage="http://libre.fm",
            ws_server=("libre.fm", "/2.0/"),
            api_key=api_key,
            api_secret=api_secret,
            session_key=session_key,
            submission_server="http://turtle.libre.fm:80/",
            username=username,
            password_hash=password_hash,
            domain_names=domain_names,
            urls={
                "album": "artist/%(artist)s/album/%(album)s",
                "artist": "artist/%(artist)s",
                "event": "event/%(id)s",
                "country": "place/%(country_name)s",
                "playlist": "user/%(user)s/library/playlists/%(appendix)s",
                "tag": "tag/%(name)s",
                "track": "music/%(artist)s/_/%(title)s",
                "group": "group/%(name)s",
                "user": "user/%(name)s",
            })

    def __repr__(self):
        """Return an eval-style representation of the stored credentials."""
        credentials = (
            self.api_key,
            self.api_secret,
            self.session_key,
            self.username,
            self.password_hash,
        )
        return "pylast.LibreFMNetwork(%s)" % ", ".join(
            "'%s'" % value for value in credentials)
+
+
def get_librefm_network(
        api_key="", api_secret="", session_key="", username="",
        password_hash=""):
    """
    Deprecated: returns a preconfigured _Network object for Libre.fm.
    Use LibreFMNetwork directly instead.

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.
    """

    # Fix: the message previously began with a redundant
    # "DeprecationWarning: " prefix, inconsistent with get_lastfm_network()
    # and duplicated by the warnings machinery that already labels the
    # warning category.
    _deprecation_warning("Create a LibreFMNetwork object instead")

    return LibreFMNetwork(
        api_key, api_secret, session_key, username, password_hash)
+
+
+class _ShelfCacheBackend(object):
+ """Used as a backend for caching cacheable requests."""
+ def __init__(self, file_path=None):
+ self.shelf = shelve.open(file_path)
+
+ def __iter__(self):
+ return iter(self.shelf.keys())
+
+ def get_xml(self, key):
+ return self.shelf[key]
+
+ def set_xml(self, key, xml_string):
+ self.shelf[key] = xml_string
+
+
class _Request(object):
    """Representing an abstract web service operation."""

    def __init__(self, network, method_name, params=None):
        """
        Build a request for `method_name` against `network`.

        # Parameters:
            * network: the _Network the request is issued against.
            * method_name str: the webservice method, e.g. "auth.getToken".
            * params dict: extra request parameters (values are coerced to
              unicode).
        """
        # Fix: `params={}` was a mutable default argument; use None instead.
        if params is None:
            params = {}

        self.network = network
        self.params = {}

        for key in params:
            self.params[key] = _unicode(params[key])

        (self.api_key, self.api_secret, self.session_key) = \
            network._get_ws_auth()

        self.params["api_key"] = self.api_key
        self.params["method"] = method_name

        if network.is_caching_enabled():
            self.cache = network._get_cache_backend()

        # A request is only auto-signed when a session key is present.
        if self.session_key:
            self.params["sk"] = self.session_key
            self.sign_it()

    def sign_it(self):
        """Sign this request (no-op if already signed)."""

        if "api_sig" not in self.params:
            self.params['api_sig'] = self._get_signature()

    def _get_signature(self):
        """
        Returns a 32-character hexadecimal md5 hash of the signature string.
        """

        # The signature is the sorted name/value concatenation plus the
        # shared secret, hashed.
        string = ""
        for name in sorted(self.params.keys()):
            string += name
            string += self.params[name]

        string += self.api_secret

        return md5(string)

    def _get_cache_key(self):
        """
        The cache key is a string of concatenated sorted names and values.
        """

        # Credentials and the signature are excluded so identical queries by
        # different users share a cache entry.
        cache_key = ""
        for key in sorted(self.params.keys()):
            if key not in ("api_sig", "api_key", "sk"):
                cache_key += key + self.params[key]

        return hashlib.sha1(cache_key.encode("utf-8")).hexdigest()

    def _get_cached_response(self):
        """Returns the cached response body, downloading it on a miss."""

        if not self._is_cached():
            response = self._download_response()
            self.cache.set_xml(self._get_cache_key(), response)

        return self.cache.get_xml(self._get_cache_key())

    def _is_cached(self):
        """Returns True if the request is already in cache."""

        return self._get_cache_key() in self.cache

    def _download_response(self):
        """Returns a response body string from the server."""

        if self.network.limit_rate:
            self.network._delay_call()

        # Build the x-www-form-urlencoded POST body.
        data = []
        for name in self.params.keys():
            data.append('='.join((
                name, url_quote_plus(_string(self.params[name])))))
        data = '&'.join(data)

        headers = {
            "Content-type": "application/x-www-form-urlencoded",
            'Accept-Charset': 'utf-8',
            'User-Agent': "pylast" + '/' + __version__
        }

        (HOST_NAME, HOST_SUBDIR) = self.network.ws_server

        if self.network.is_proxy_enabled():
            # Connect through the configured proxy; HTTPS only when it can
            # be done securely on this Python build.
            if _can_use_ssl_securely():
                conn = HTTPSConnection(
                    context=SSL_CONTEXT,
                    host=self.network._get_proxy()[0],
                    port=self.network._get_proxy()[1])
            else:
                conn = HTTPConnection(
                    host=self.network._get_proxy()[0],
                    port=self.network._get_proxy()[1])

            try:
                conn.request(
                    method='POST', url="http://" + HOST_NAME + HOST_SUBDIR,
                    body=data, headers=headers)
            except Exception as e:
                raise NetworkError(self.network, e)

        else:
            if _can_use_ssl_securely():
                conn = HTTPSConnection(
                    context=SSL_CONTEXT,
                    host=HOST_NAME
                )
            else:
                conn = HTTPConnection(
                    host=HOST_NAME
                )

            try:
                conn.request(
                    method='POST', url=HOST_SUBDIR, body=data,
                    headers=headers)
            except Exception as e:
                raise NetworkError(self.network, e)

        try:
            response_text = _unicode(conn.getresponse().read())
        except Exception as e:
            raise MalformedResponseError(self.network, e)

        # Strip characters that are illegal in XML before parsing.
        response_text = XML_ILLEGAL.sub("?", response_text)

        self._check_response_for_errors(response_text)
        return response_text

    def execute(self, cacheable=False):
        """Returns the XML DOM response of the POST Request from the server"""

        if self.network.is_caching_enabled() and cacheable:
            response = self._get_cached_response()
        else:
            response = self._download_response()

        return minidom.parseString(_string(response).replace(
            "opensearch:", ""))

    def _check_response_for_errors(self, response):
        """Checks the response for errors and raises one if any exists."""

        try:
            doc = minidom.parseString(_string(response).replace(
                "opensearch:", ""))
        except Exception as e:
            raise MalformedResponseError(self.network, e)

        e = doc.getElementsByTagName('lfm')[0]

        if e.getAttribute('status') != "ok":
            e = doc.getElementsByTagName('error')[0]
            status = e.getAttribute('code')
            details = e.firstChild.data.strip()
            raise WSError(self.network, status, details)
+
+
class SessionKeyGenerator(object):
    """Methods of generating a session key:
    1) Web Authentication:
        a. network = get_*_network(API_KEY, API_SECRET)
        b. sg = SessionKeyGenerator(network)
        c. url = sg.get_web_auth_url()
        d. Ask the user to open the url and authorize you, and wait for it.
        e. session_key = sg.get_web_auth_session_key(url)
    2) Username and Password Authentication:
        a. network = get_*_network(API_KEY, API_SECRET)
        b. username = raw_input("Please enter your username: ")
        c. password_hash = pylast.md5(raw_input("Please enter your password: ")
        d. session_key = SessionKeyGenerator(network).get_session_key(username,
           password_hash)

    A session key's lifetime is infinite, unless the user revokes the rights
    of the given API Key.

    If you create a Network object with just a API_KEY and API_SECRET and a
    username and a password_hash, a SESSION_KEY will be automatically
    generated for that network and stored in it so you don't have to do this
    manually, unless you want to.
    """

    def __init__(self, network):
        self.network = network
        # Maps each web-auth URL handed out to the token embedded in it.
        self.web_auth_tokens = {}

    def _get_web_auth_token(self):
        """
        Retrieves a token from the network for web authentication.
        The token then has to be authorized from getAuthURL before creating
        session.
        """

        request = _Request(self.network, 'auth.getToken')

        # Requests are auto-signed only when a session key is present, so
        # sign explicitly here.
        request.sign_it()

        doc = request.execute()

        e = doc.getElementsByTagName('token')[0]
        return e.firstChild.data

    def get_web_auth_url(self):
        """
        Returns the authorization URL. The user must open this page and
        authorize the application first; then call
        get_web_auth_session_key(url).
        """

        token = self._get_web_auth_token()

        url = '%(homepage)s/api/auth/?api_key=%(api)s&token=%(token)s' % \
            {"homepage": self.network.homepage,
             "api": self.network.api_key, "token": token}

        # Remember the token so get_web_auth_session_key can find it by URL.
        self.web_auth_tokens[url] = token

        return url

    def get_web_auth_session_key(self, url, token=""):
        """
        Retrieves the session key of a web authorization process by its url.
        """

        # Fall back to the caller-supplied token when the URL is unknown;
        # the web service raises a WSError if it is blank or unauthorized.
        # (Replaces a redundant `.keys()` membership test plus a no-op
        # `token = token` else-branch.)
        token = self.web_auth_tokens.get(url, token)

        request = _Request(self.network, 'auth.getSession', {'token': token})

        # Sign explicitly: no session key exists yet.
        request.sign_it()

        doc = request.execute()

        return doc.getElementsByTagName('key')[0].firstChild.data

    def get_session_key(self, username, password_hash):
        """
        Retrieve a session key with a username and a md5 hash of the user's
        password.
        """

        params = {
            "username": username, "authToken": md5(username + password_hash)}
        request = _Request(self.network, "auth.getMobileSession", params)

        # Sign explicitly: no session key exists yet.
        request.sign_it()

        doc = request.execute()

        return _extract(doc, "key")
+
+
# Lightweight, immutable result records returned by the webservice wrappers.

# An item (artist/album/track/tag/user) paired with its weight/playcount.
TopItem = collections.namedtuple("TopItem", ["item", "weight"])
# An item paired with its similarity match score.
SimilarItem = collections.namedtuple("SimilarItem", ["item", "match"])
# A library entry: the item plus its play and tag counts.
LibraryItem = collections.namedtuple(
    "LibraryItem", ["item", "playcount", "tagcount"])
# A scrobbled track, the album it came from, and when it was played.
PlayedTrack = collections.namedtuple(
    "PlayedTrack", ["track", "album", "playback_date", "timestamp"])
# A loved track together with the date it was loved.
LovedTrack = collections.namedtuple(
    "LovedTrack", ["track", "date", "timestamp"])
# URLs of an image in each of the available sizes.
ImageSizes = collections.namedtuple(
    "ImageSizes", [
        "original", "large", "largesquare", "medium", "small", "extralarge"])
# A gallery image and its metadata.
Image = collections.namedtuple(
    "Image", [
        "title", "url", "dateadded", "format", "owner", "sizes", "votes"])
# A shout (comment) posted by a user.
Shout = collections.namedtuple(
    "Shout", ["body", "author", "date"])
+
+
def _string_output(funct):
    """Decorator coercing `funct`'s return value through _string()."""
    def wrapper(*args):
        return _string(funct(*args))
    return wrapper
+
+
+def _pad_list(given_list, desired_length, padding=None):
+ """
+ Pads a list to be of the desired_length.
+ """
+
+ while len(given_list) < desired_length:
+ given_list.append(padding)
+
+ return given_list
+
+
class _BaseObject(object):
    """An abstract webservices object."""

    network = None

    def __init__(self, network, ws_prefix):
        # ws_prefix is the API namespace ("artist", "track", ...) used to
        # build method names such as "artist.getInfo".
        self.network = network
        self.ws_prefix = ws_prefix

    def _request(self, method_name, cacheable=False, params=None):
        """Perform a webservice request, defaulting to this object's
        parameters when none are given."""
        if not params:
            params = self._get_params()

        return _Request(self.network, method_name, params).execute(cacheable)

    def _get_params(self):
        """Returns the most common set of parameters between all objects."""

        return {}

    def __hash__(self):
        # Convert any ints (or whatever) into strings
        values = map(six.text_type, self._get_params().values())

        return hash(self.network) + hash(six.text_type(type(self)) + "".join(
            list(self._get_params().keys()) + list(values)
        ).lower())

    def _extract_cdata_from_request(self, method_name, tag_name, params):
        """Return the stripped CDATA content of the first `tag_name` element
        in the (cacheable) response."""
        doc = self._request(method_name, True, params)

        return doc.getElementsByTagName(
            tag_name)[0].firstChild.wholeText.strip()

    def _get_things(
            self, method, thing, thing_type, params=None, cacheable=True):
        """Returns a list of the most played thing_types by this thing."""

        doc = self._request(
            self.ws_prefix + "." + method, cacheable, params)

        seq = []
        for node in doc.getElementsByTagName(thing):
            title = _extract(node, "name")
            # The second <name> element under the node is the artist's.
            artist = _extract(node, "name", 1)
            playcount = _number(_extract(node, "playcount"))

            seq.append(TopItem(
                thing_type(artist, title, self.network), playcount))

        return seq

    def get_top_fans(self, limit=None, cacheable=True):
        """Returns a list of the Users who played this the most.
        # Parameters:
            * limit int: Max elements.
        # For Artist/Track
        """

        doc = self._request(self.ws_prefix + '.getTopFans', cacheable)

        seq = []

        for element in doc.getElementsByTagName('user'):
            if limit and len(seq) >= limit:
                break

            name = _extract(element, 'name')
            weight = _number(_extract(element, 'weight'))

            seq.append(TopItem(User(name, self.network), weight))

        return seq

    def share(self, users, message=None):
        """
        Shares this (sends out recommendations).
        Parameters:
            * users [User|str,]: A list that can contain usernames, emails,
              User objects, or all of them.
            * message str: A message to include in the recommendation
              message. Only for Artist/Event/Track.
        """

        # Last.fm currently accepts a max of 10 recipients at a time, so
        # recursively share full batches of 10 and keep the remainder.
        # (Fixes an off-by-one: the old slices [0:9]/[9:] batched only 9.)
        while len(users) > 10:
            section = users[0:10]
            users = users[10:]
            self.share(section, message)

        nusers = []
        for user in users:
            if isinstance(user, User):
                nusers.append(user.get_name())
            else:
                nusers.append(user)

        params = self._get_params()
        params['recipient'] = ','.join(nusers)
        if message:
            params['message'] = message

        self._request(self.ws_prefix + '.share', False, params)

    def get_wiki_published_date(self):
        """
        Returns the date the wiki was published.
        Only for Album/Track.
        """
        # Fix: docstring previously claimed this returned the summary.
        return self.get_wiki("published")

    def get_wiki_summary(self):
        """
        Returns the summary of the wiki.
        Only for Album/Track.
        """
        return self.get_wiki("summary")

    def get_wiki_content(self):
        """
        Returns the content of the wiki.
        Only for Album/Track.
        """
        # Fix: docstring previously claimed this returned the summary.
        return self.get_wiki("content")

    def get_wiki(self, section):
        """
        Returns a section of the wiki, or None when the response carries no
        wiki element.
        Only for Album/Track.
        section can be "content", "summary" or
            "published" (for published date)
        """

        doc = self._request(self.ws_prefix + ".getInfo", True)

        if len(doc.getElementsByTagName("wiki")) == 0:
            return

        node = doc.getElementsByTagName("wiki")[0]

        return _extract(node, section)

    def get_shouts(self, limit=50, cacheable=False):
        """
        Returns a sequence of Shout objects
        """

        shouts = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getShouts",
                cacheable):
            shouts.append(
                Shout(
                    _extract(node, "body"),
                    User(_extract(node, "author"), self.network),
                    _extract(node, "date")
                )
            )
        return shouts
+
+
class _Chartable(object):
    """Common functions for classes with charts."""

    def __init__(self, ws_prefix):
        self.ws_prefix = ws_prefix  # TODO move to _BaseObject?

    def get_weekly_chart_dates(self):
        """Returns a list of From and To tuples for the available charts."""

        doc = self._request(self.ws_prefix + ".getWeeklyChartList", True)

        seq = []
        for node in doc.getElementsByTagName("chart"):
            seq.append((node.getAttribute("from"), node.getAttribute("to")))

        return seq

    def get_weekly_album_charts(self, from_date=None, to_date=None):
        """
        Returns the weekly album charts for the week starting from the
        from_date value to the to_date value.
        Only for Group or User.
        """
        return self.get_weekly_charts("album", from_date, to_date)

    def get_weekly_artist_charts(self, from_date=None, to_date=None):
        """
        Returns the weekly artist charts for the week starting from the
        from_date value to the to_date value.
        Only for Group, Tag or User.
        """
        return self.get_weekly_charts("artist", from_date, to_date)

    def get_weekly_track_charts(self, from_date=None, to_date=None):
        """
        Returns the weekly track charts for the week starting from the
        from_date value to the to_date value.
        Only for Group or User.
        """
        return self.get_weekly_charts("track", from_date, to_date)

    def get_weekly_charts(self, chart_kind, from_date=None, to_date=None):
        """
        Returns the weekly charts for the week starting from the
        from_date value to the to_date value.
        chart_kind should be one of "album", "artist" or "track"
        """
        method = ".getWeekly" + chart_kind.title() + "Chart"
        # Fix: map the kind to its class explicitly instead of eval()ing the
        # capitalized kind name -- same result for valid kinds, no dynamic
        # code evaluation.
        chart_type = {
            "album": Album,
            "artist": Artist,
            "track": Track,
        }[chart_kind.lower()]

        params = self._get_params()
        if from_date and to_date:
            params["from"] = from_date
            params["to"] = to_date

        doc = self._request(
            self.ws_prefix + method, True, params)

        seq = []
        for node in doc.getElementsByTagName(chart_kind.lower()):
            item = chart_type(
                _extract(node, "artist"), _extract(node, "name"),
                self.network)
            weight = _number(_extract(node, "playcount"))
            seq.append(TopItem(item, weight))

        return seq
+
+
class _Taggable(object):
    """Common functions for classes with tags."""

    def __init__(self, ws_prefix):
        self.ws_prefix = ws_prefix  # TODO move to _BaseObject

    def add_tags(self, tags):
        """Adds one or several tags.
        * tags: A sequence of tag names or Tag objects.
        """

        for tag in tags:
            self.add_tag(tag)

    def add_tag(self, tag):
        """Adds one tag.
        * tag: a tag name or a Tag object.
        """

        if isinstance(tag, Tag):
            tag = tag.get_name()

        params = self._get_params()
        params['tags'] = tag

        self._request(self.ws_prefix + '.addTags', False, params)

    def remove_tag(self, tag):
        """Remove a user's tag from this object.
        * tag: a tag name or a Tag object.
        """

        if isinstance(tag, Tag):
            tag = tag.get_name()

        params = self._get_params()
        params['tag'] = tag

        self._request(self.ws_prefix + '.removeTag', False, params)

    def get_tags(self):
        """Returns a list of the tags set by the user to this object."""

        # Uncacheable because it can be dynamically changed by the user.
        params = self._get_params()

        doc = self._request(self.ws_prefix + '.getTags', False, params)
        return [Tag(name, self.network)
                for name in _extract_all(doc, 'name')]

    def remove_tags(self, tags):
        """Removes one or several tags from this object.
        * tags: a sequence of tag names or Tag objects.
        """

        for tag in tags:
            self.remove_tag(tag)

    def clear_tags(self):
        """Clears all the user-set tags."""

        # Fix: remove_tags() takes a single sequence; the old call unpacked
        # the list into separate positional arguments, raising TypeError
        # whenever more than one tag was set.
        self.remove_tags(self.get_tags())

    def set_tags(self, tags):
        """Sets this object's tags to only those tags.
        * tags: a sequence of tag names or Tag objects.
        """

        # Compare case-insensitively while preserving the original spelling
        # for the actual add/remove calls.
        old_tags = [tag.get_name() for tag in self.get_tags()]
        c_old_tags = [name.lower() for name in old_tags]
        c_new_tags = [tag.lower() for tag in tags]

        to_remove = [old for old, c_old in zip(old_tags, c_old_tags)
                     if c_old not in c_new_tags]
        to_add = [new for new, c_new in zip(tags, c_new_tags)
                  if c_new not in c_old_tags]

        self.remove_tags(to_remove)
        self.add_tags(to_add)

    def get_top_tags(self, limit=None):
        """Returns a list of the most frequently used Tags on this object."""

        doc = self._request(self.ws_prefix + '.getTopTags', True)

        seq = []
        for element in doc.getElementsByTagName('tag'):
            tag_name = _extract(element, 'name')
            tagcount = _extract(element, 'count')

            seq.append(TopItem(Tag(tag_name, self.network), tagcount))

        if limit:
            seq = seq[:limit]

        return seq
+
+
class WSError(Exception):
    """Exception raised when the Network web service reports an error."""

    def __init__(self, network, status, details):
        self.network = network
        self.status = status
        self.details = details

    @_string_output
    def __str__(self):
        """Human-readable error details from the service response."""
        return self.details

    def get_id(self):
        """Returns the exception ID, from one of the following:
        STATUS_INVALID_SERVICE = 2
        STATUS_INVALID_METHOD = 3
        STATUS_AUTH_FAILED = 4
        STATUS_INVALID_FORMAT = 5
        STATUS_INVALID_PARAMS = 6
        STATUS_INVALID_RESOURCE = 7
        STATUS_TOKEN_ERROR = 8
        STATUS_INVALID_SK = 9
        STATUS_INVALID_API_KEY = 10
        STATUS_OFFLINE = 11
        STATUS_SUBSCRIBERS_ONLY = 12
        STATUS_TOKEN_UNAUTHORIZED = 14
        STATUS_TOKEN_EXPIRED = 15
        """
        return self.status
+
+
class MalformedResponseError(Exception):
    """Exception conveying a malformed response from the music network."""

    def __init__(self, network, underlying_error):
        self.network = network
        self.underlying_error = underlying_error

    def __str__(self):
        return "Malformed response from %s. Underlying error: %s" % (
            self.network.name, self.underlying_error)
+
+
class NetworkError(Exception):
    """Exception conveying a problem in sending a request to Last.fm"""

    def __init__(self, network, underlying_error):
        self.network = network
        self.underlying_error = underlying_error

    def __str__(self):
        return "NetworkError: %s" % self.underlying_error
+
+
class _Opus(_BaseObject, _Taggable):
    """An album or track."""

    artist = None
    title = None
    username = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, artist, title, network, ws_prefix, username=None):
        """
        Create an opus instance.
        # Parameters:
            * artist: An artist name or an Artist object.
            * title: The album or track title.
            * ws_prefix: 'album' or 'track'
            * username: optional username scoping user-specific queries.
        """

        _BaseObject.__init__(self, network, ws_prefix)
        _Taggable.__init__(self, ws_prefix)

        if isinstance(artist, Artist):
            self.artist = artist
        else:
            self.artist = Artist(artist, self.network)

        self.title = title
        self.username = username

    def __repr__(self):
        return "pylast.%s(%s, %s, %s)" % (
            self.ws_prefix.title(), repr(self.artist.name),
            repr(self.title), repr(self.network))

    @_string_output
    def __str__(self):
        return _unicode("%s - %s") % (
            self.get_artist().get_name(), self.get_title())

    def __eq__(self, other):
        """Equal when the other object has the same type, title and artist
        name (case-insensitively)."""
        # Fix: identity comparison of types uses `is not`, not `!=`.
        if type(self) is not type(other):
            return False
        return (
            self.get_title().lower() == other.get_title().lower() and
            self.get_artist().get_name().lower() ==
            other.get_artist().get_name().lower())

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_params(self):
        return {
            'artist': self.get_artist().get_name(),
            self.ws_prefix: self.get_title()}

    def get_artist(self):
        """Returns the associated Artist object."""

        return self.artist

    def get_title(self, properly_capitalized=False):
        """Returns the artist or track title.
        If properly_capitalized is True, the title is refreshed from the
        webservice (overwriting the stored one)."""
        if properly_capitalized:
            self.title = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.title

    def get_name(self, properly_capitalized=False):
        """Returns the album or track title (alias to get_title())."""

        return self.get_title(properly_capitalized)

    def get_id(self):
        """Returns the ID on the network."""

        return _extract(
            self._request(self.ws_prefix + ".getInfo", cacheable=True), "id")

    def get_playcount(self):
        """Returns the number of plays on the network"""

        return _number(_extract(
            self._request(
                self.ws_prefix + ".getInfo", cacheable=True), "playcount"))

    def get_userplaycount(self):
        """Returns the number of plays by a given username, or None when no
        username was set on this object."""

        if not self.username:
            return

        params = self._get_params()
        params['username'] = self.username

        doc = self._request(self.ws_prefix + ".getInfo", True, params)
        return _number(_extract(doc, "userplaycount"))

    def get_listener_count(self):
        """Returns the number of listeners on the network"""

        return _number(_extract(
            self._request(
                self.ws_prefix + ".getInfo", cacheable=True), "listeners"))

    def get_mbid(self):
        """Returns the MusicBrainz ID of the album or track, or None when
        the response carries none."""

        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)

        try:
            lfm = doc.getElementsByTagName('lfm')[0]
            opus = next(self._get_children_by_tag_name(lfm, self.ws_prefix))
            mbid = next(self._get_children_by_tag_name(opus, "mbid"))
            return mbid.firstChild.nodeValue
        except StopIteration:
            # Either the opus node or its <mbid> child is missing.
            return None

    def _get_children_by_tag_name(self, node, tag_name):
        """Yield direct child elements of `node` matching `tag_name`
        ('*' matches every element)."""
        for child in node.childNodes:
            if (child.nodeType == child.ELEMENT_NODE and
                    (tag_name == '*' or child.tagName == tag_name)):
                yield child
+
+
class Album(_Opus):
    """An album."""

    __hash__ = _Opus.__hash__

    def __init__(self, artist, title, network, username=None):
        super(Album, self).__init__(artist, title, network, "album", username)

    def get_release_date(self):
        """Returns the release date of the album."""

        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)
        return _extract(doc, "releasedate")

    def get_cover_image(self, size=COVER_EXTRA_LARGE):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)
        return _extract_all(doc, 'image')[size]

    def get_tracks(self):
        """Returns the list of Tracks on this album."""

        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)
        return _extract_tracks(doc, "tracks")

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the URL of the album or track page on the network.
        # Parameters:
            * domain_name str: The network's language domain. Possible values:
              o DOMAIN_ENGLISH
              o DOMAIN_GERMAN
              o DOMAIN_SPANISH
              o DOMAIN_FRENCH
              o DOMAIN_ITALIAN
              o DOMAIN_POLISH
              o DOMAIN_PORTUGUESE
              o DOMAIN_SWEDISH
              o DOMAIN_TURKISH
              o DOMAIN_RUSSIAN
              o DOMAIN_JAPANESE
              o DOMAIN_CHINESE
        """

        return self.network._get_url(domain_name, self.ws_prefix) % {
            'artist': _url_safe(self.get_artist().get_name()),
            'album': _url_safe(self.get_title()),
        }
+
+
+class Artist(_BaseObject, _Taggable):
+ """An artist."""
+
+ name = None
+ username = None
+
+ __hash__ = _BaseObject.__hash__
+
+ def __init__(self, name, network, username=None):
+ """Create an artist object.
+ # Parameters:
+ * name str: The artist's name.
+ """
+
+ _BaseObject.__init__(self, network, 'artist')
+ _Taggable.__init__(self, 'artist')
+
+ self.name = name
+ self.username = username
+
+ def __repr__(self):
+ return "pylast.Artist(%s, %s)" % (
+ repr(self.get_name()), repr(self.network))
+
+ def __unicode__(self):
+ return six.text_type(self.get_name())
+
+ @_string_output
+ def __str__(self):
+ return self.__unicode__()
+
+ def __eq__(self, other):
+ if type(self) is type(other):
+ return self.get_name().lower() == other.get_name().lower()
+ else:
+ return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def _get_params(self):
+ return {self.ws_prefix: self.get_name()}
+
+ def get_name(self, properly_capitalized=False):
+ """Returns the name of the artist.
+ If properly_capitalized was asserted then the name would be downloaded
+ overwriting the given one."""
+
+ if properly_capitalized:
+ self.name = _extract(
+ self._request(self.ws_prefix + ".getInfo", True), "name")
+
+ return self.name
+
+ def get_correction(self):
+ """Returns the corrected artist name."""
+
+ return _extract(
+ self._request(self.ws_prefix + ".getCorrection"), "name")
+
+ def get_cover_image(self, size=COVER_MEGA):
+ """
+ Returns a uri to the cover image
+ size can be one of:
+ COVER_MEGA
+ COVER_EXTRA_LARGE
+ COVER_LARGE
+ COVER_MEDIUM
+ COVER_SMALL
+ """
+
+ return _extract_all(
+ self._request(self.ws_prefix + ".getInfo", True), "image")[size]
+
+ def get_playcount(self):
+ """Returns the number of plays on the network."""
+
+ return _number(_extract(
+ self._request(self.ws_prefix + ".getInfo", True), "playcount"))
+
+ def get_userplaycount(self):
+ """Returns the number of plays by a given username"""
+
+ if not self.username:
+ return
+
+ params = self._get_params()
+ params['username'] = self.username
+
+ doc = self._request(self.ws_prefix + ".getInfo", True, params)
+ return _number(_extract(doc, "userplaycount"))
+
+ def get_mbid(self):
+ """Returns the MusicBrainz ID of this artist."""
+
+ doc = self._request(self.ws_prefix + ".getInfo", True)
+
+ return _extract(doc, "mbid")
+
+ def get_listener_count(self):
+ """Returns the number of listeners on the network."""
+
+ if hasattr(self, "listener_count"):
+ return self.listener_count
+ else:
+ self.listener_count = _number(_extract(
+ self._request(self.ws_prefix + ".getInfo", True), "listeners"))
+ return self.listener_count
+
+ def is_streamable(self):
+ """Returns True if the artist is streamable."""
+
+ return bool(_number(_extract(
+ self._request(self.ws_prefix + ".getInfo", True), "streamable")))
+
+ def get_bio(self, section, language=None):
+ """
+ Returns a section of the bio.
+ section can be "content", "summary" or
+ "published" (for published date)
+ """
+ if language:
+ params = self._get_params()
+ params["lang"] = language
+ else:
+ params = None
+
+ return self._extract_cdata_from_request(
+ self.ws_prefix + ".getInfo", section, params)
+
+ def get_bio_published_date(self):
+ """Returns the date on which the artist's biography was published."""
+ return self.get_bio("published")
+
+ def get_bio_summary(self, language=None):
+ """Returns the summary of the artist's biography."""
+ return self.get_bio("summary", language)
+
+ def get_bio_content(self, language=None):
+ """Returns the content of the artist's biography."""
+ return self.get_bio("content", language)
+
+ def get_upcoming_events(self):
+ """Returns a list of the upcoming Events for this artist."""
+
+ doc = self._request(self.ws_prefix + '.getEvents', True)
+
+ return _extract_events_from_doc(doc, self.network)
+
+ def get_similar(self, limit=None):
+ """Returns the similar artists on the network."""
+
+ params = self._get_params()
+ if limit:
+ params['limit'] = limit
+
+ doc = self._request(self.ws_prefix + '.getSimilar', True, params)
+
+ names = _extract_all(doc, "name")
+ matches = _extract_all(doc, "match")
+
+ artists = []
+ for i in range(0, len(names)):
+ artists.append(SimilarItem(
+ Artist(names[i], self.network), _number(matches[i])))
+
+ return artists
+
+ def get_top_albums(self, limit=None, cacheable=True):
+ """Returns a list of the top albums."""
+ params = self._get_params()
+ if limit:
+ params['limit'] = limit
+
+ return self._get_things(
+ "getTopAlbums", "album", Album, params, cacheable)
+
+ def get_top_tracks(self, limit=None, cacheable=True):
+ """Returns a list of the most played Tracks by this artist."""
+ params = self._get_params()
+ if limit:
+ params['limit'] = limit
+
+ return self._get_things(
+ "getTopTracks", "track", Track, params, cacheable)
+
    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the artist page on the network.
        # Parameters:
        * domain_name: The network's language domain. Possible values:
          o DOMAIN_ENGLISH
          o DOMAIN_GERMAN
          o DOMAIN_SPANISH
          o DOMAIN_FRENCH
          o DOMAIN_ITALIAN
          o DOMAIN_POLISH
          o DOMAIN_PORTUGUESE
          o DOMAIN_SWEDISH
          o DOMAIN_TURKISH
          o DOMAIN_RUSSIAN
          o DOMAIN_JAPANESE
          o DOMAIN_CHINESE
        """

        # URL-escape the artist name before substituting into the template.
        artist = _url_safe(self.get_name())

        return self.network._get_url(
            domain_name, "artist") % {'artist': artist}
+
    def shout(self, message):
        """
        Post a shout

        * message: the text to post to the artist's shoutbox.
        """

        params = self._get_params()
        params["message"] = message

        # NOTE(review): the method name is capitalized ("artist.Shout")
        # unlike other calls (e.g. 'event.attend') — presumably accepted by
        # the service; confirm against the web API's method naming.
        self._request("artist.Shout", False, params)
+
    def get_band_members(self):
        """Returns a list of band members or None if unknown."""

        # Stays None when the response has no <bandmembers> element.
        names = None
        doc = self._request(self.ws_prefix + ".getInfo", True)

        # If several <bandmembers> nodes were present, only the names from
        # the last one would be kept (each iteration reassigns `names`).
        for node in doc.getElementsByTagName("bandmembers"):
            names = _extract_all(node, "name")

        return names
+
+
class Event(_BaseObject):
    """An event."""

    # Numeric event id on the network (set in __init__).
    id = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, event_id, network):
        _BaseObject.__init__(self, network, 'event')

        self.id = event_id

    def __repr__(self):
        return "pylast.Event(%s, %s)" % (repr(self.id), repr(self.network))

    @_string_output
    def __str__(self):
        return "Event #" + str(self.get_id())

    def __eq__(self, other):
        # Events are equal only when both type and id match.
        if type(self) is type(other):
            return self.get_id() == other.get_id()
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_params(self):
        return {'event': self.get_id()}

    def attend(self, attending_status):
        """Sets the attending status.
        * attending_status: The attending status. Possible values:
          o EVENT_ATTENDING
          o EVENT_MAYBE_ATTENDING
          o EVENT_NOT_ATTENDING
        """

        params = self._get_params()
        params['status'] = attending_status

        self._request('event.attend', False, params)

    def get_attendees(self):
        """
        Get a list of attendees for an event
        """

        # Not cacheable: the attendee list changes over time.
        doc = self._request("event.getAttendees", False)

        users = []
        for name in _extract_all(doc, "name"):
            users.append(User(name, self.network))

        return users

    def get_id(self):
        """Returns the id of the event on the network. """

        return self.id

    def get_title(self):
        """Returns the title of the event. """

        doc = self._request("event.getInfo", True)

        return _extract(doc, "title")

    def get_headliner(self):
        """Returns the headliner of the event. """

        doc = self._request("event.getInfo", True)

        return Artist(_extract(doc, "headliner"), self.network)

    def get_artists(self):
        """Returns a list of the participating Artists. """

        doc = self._request("event.getInfo", True)
        names = _extract_all(doc, "artist")

        artists = []
        for name in names:
            artists.append(Artist(name, self.network))

        return artists

    def get_venue(self):
        """Returns the venue where the event is held."""

        doc = self._request("event.getInfo", True)

        # The response carries the full venue element; pass it along so the
        # Venue object can avoid a second request.
        v = doc.getElementsByTagName("venue")[0]
        venue_id = _number(_extract(v, "id"))

        return Venue(venue_id, self.network, venue_element=v)

    def get_start_date(self):
        """Returns the date when the event starts."""

        doc = self._request("event.getInfo", True)

        return _extract(doc, "startDate")

    def get_description(self):
        """Returns the description of the event. """

        doc = self._request("event.getInfo", True)

        return _extract(doc, "description")

    def get_cover_image(self, size=COVER_MEGA):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_MEGA
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        doc = self._request("event.getInfo", True)

        # COVER_* constants index into the list of <image> URLs —
        # presumably ordered smallest to largest; TODO confirm.
        return _extract_all(doc, "image")[size]

    def get_attendance_count(self):
        """Returns the number of attending people. """

        doc = self._request("event.getInfo", True)

        return _number(_extract(doc, "attendance"))

    def get_review_count(self):
        """Returns the number of available reviews for this event. """

        doc = self._request("event.getInfo", True)

        return _number(_extract(doc, "reviews"))

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the event page on the network.
        * domain_name: The network's language domain. Possible values:
          o DOMAIN_ENGLISH
          o DOMAIN_GERMAN
          o DOMAIN_SPANISH
          o DOMAIN_FRENCH
          o DOMAIN_ITALIAN
          o DOMAIN_POLISH
          o DOMAIN_PORTUGUESE
          o DOMAIN_SWEDISH
          o DOMAIN_TURKISH
          o DOMAIN_RUSSIAN
          o DOMAIN_JAPANESE
          o DOMAIN_CHINESE
        """

        return self.network._get_url(
            domain_name, "event") % {'id': self.get_id()}

    def shout(self, message):
        """
        Post a shout

        * message: the text to post to the event's shoutbox.
        """

        params = self._get_params()
        params["message"] = message

        # NOTE(review): capitalized method name ("event.Shout") unlike
        # 'event.attend' above — confirm against the web API naming.
        self._request("event.Shout", False, params)
+
+
class Country(_BaseObject):
    """A country at Last.fm."""

    # Country name as given by the caller (set in __init__).
    name = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network):
        _BaseObject.__init__(self, network, "geo")

        self.name = name

    def __repr__(self):
        return "pylast.Country(%s, %s)" % (repr(self.name), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name()

    def __eq__(self, other):
        # Country names compare case-insensitively.
        return self.get_name().lower() == other.get_name().lower()

    def __ne__(self, other):
        # Fixed: mirror __eq__'s case-insensitive comparison.  Previously
        # this compared case-sensitively, so e.g. "UK" and "uk" were
        # simultaneously equal (__eq__) and unequal (__ne__).
        return not self.__eq__(other)

    def _get_params(self):  # TODO can move to _BaseObject
        return {'country': self.get_name()}

    def _get_name_from_code(self, alpha2code):
        # TODO: Have this function lookup the alpha-2 code and return the
        # country name.

        return alpha2code

    def get_name(self):
        """Returns the country name. """

        return self.name

    def get_top_artists(self, limit=None, cacheable=True):
        """Returns a sequence of the most played artists."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request('geo.getTopArtists', cacheable, params)

        return _extract_top_artists(doc, self)

    def get_top_tracks(self, limit=None, cacheable=True):
        """Returns a sequence of the most played tracks"""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopTracks", "track", Track, params, cacheable)

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the country page on the network.
        * domain_name: The network's language domain. Possible values:
          o DOMAIN_ENGLISH
          o DOMAIN_GERMAN
          o DOMAIN_SPANISH
          o DOMAIN_FRENCH
          o DOMAIN_ITALIAN
          o DOMAIN_POLISH
          o DOMAIN_PORTUGUESE
          o DOMAIN_SWEDISH
          o DOMAIN_TURKISH
          o DOMAIN_RUSSIAN
          o DOMAIN_JAPANESE
          o DOMAIN_CHINESE
        """

        country_name = _url_safe(self.get_name())

        return self.network._get_url(
            domain_name, "country") % {'country_name': country_name}
+
+
class Metro(_BaseObject):
    """A metro at Last.fm."""

    # Metro name and its country (set in __init__).
    name = None
    country = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, country, network):
        _BaseObject.__init__(self, network, None)

        self.name = name
        self.country = country

    def __repr__(self):
        return "pylast.Metro(%s, %s, %s)" % (
            repr(self.name), repr(self.country), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name() + ", " + self.get_country()

    def __eq__(self, other):
        # Both name and country compare case-insensitively.
        return (self.get_name().lower() == other.get_name().lower() and
                self.get_country().lower() == other.get_country().lower())

    def __ne__(self, other):
        # Fixed: mirror __eq__ exactly.  Previously the name was compared
        # case-sensitively here (while the country was lowercased), so two
        # metros differing only in name case were both == and != True.
        return not self.__eq__(other)

    def _get_params(self):
        return {'metro': self.get_name(), 'country': self.get_country()}

    def get_name(self):
        """Returns the metro name."""

        return self.name

    def get_country(self):
        """Returns the metro country."""

        return self.country

    def _get_chart(
            self, method, tag="artist", limit=None, from_date=None,
            to_date=None, cacheable=True):
        """Internal helper for getting geo charts.

        * method: full web-service method name (e.g. "geo.getMetroArtistChart").
        * tag: "artist" or "track" — selects how result nodes are parsed.
        Returns a list of TopItem(item, listeners) or None for an
        unrecognised tag.
        """
        params = self._get_params()
        if limit:
            params["limit"] = limit
        # Both bounds must be given for a date range to apply.
        if from_date and to_date:
            params["from"] = from_date
            params["to"] = to_date

        doc = self._request(method, cacheable, params)

        seq = []
        for node in doc.getElementsByTagName(tag):
            if tag == "artist":
                item = Artist(_extract(node, "name"), self.network)
            elif tag == "track":
                title = _extract(node, "name")
                artist = _extract_element_tree(node).get('artist')['name']
                item = Track(artist, title, self.network)
            else:
                return None
            weight = _number(_extract(node, "listeners"))
            seq.append(TopItem(item, weight))

        return seq

    def get_artist_chart(
            self, tag="artist", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of artists for a metro.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroArtistChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)

    def get_hype_artist_chart(
            self, tag="artist", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of hyped (up and coming) artists for a metro.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroHypeArtistChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)

    def get_unique_artist_chart(
            self, tag="artist", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of the artists which make that metro unique.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroUniqueArtistChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)

    def get_track_chart(
            self, tag="track", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of tracks for a metro.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroTrackChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)

    def get_hype_track_chart(
            self, tag="track", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of hyped (up and coming) tracks for a metro.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroHypeTrackChart", tag=tag,
            limit=limit, from_date=from_date, to_date=to_date,
            cacheable=cacheable)

    def get_unique_track_chart(
            self, tag="track", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of the tracks which make that metro unique.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroUniqueTrackChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)
+
+
class Library(_BaseObject):
    """A user's Last.fm library."""

    # Owning User object (set in __init__).
    user = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, user, network):
        _BaseObject.__init__(self, network, 'library')

        # Accept either a User object or a user name.
        if isinstance(user, User):
            self.user = user
        else:
            self.user = User(user, self.network)

        # Pagination indices — not consulted in the code visible here;
        # presumably used by paging helpers elsewhere. TODO confirm.
        self._albums_index = 0
        self._artists_index = 0
        self._tracks_index = 0

    def __repr__(self):
        return "pylast.Library(%s, %s)" % (repr(self.user), repr(self.network))

    @_string_output
    def __str__(self):
        return repr(self.get_user()) + "'s Library"

    def _get_params(self):
        return {'user': self.user.get_name()}

    def get_user(self):
        """Returns the user who owns this library."""

        return self.user

    def add_album(self, album):
        """Add an album to this library.

        * album: an Album object.
        """

        params = self._get_params()
        params["artist"] = album.get_artist().get_name()
        params["album"] = album.get_name()

        # NOTE(review): hard-coded "library.addAlbum" — the sibling methods
        # build the name from self.ws_prefix; equivalent here but
        # inconsistent.
        self._request("library.addAlbum", False, params)

    def remove_album(self, album):
        """Remove an album from this library.

        * album: an Album object.
        """

        params = self._get_params()
        params["artist"] = album.get_artist().get_name()
        params["album"] = album.get_name()

        self._request(self.ws_prefix + ".removeAlbum", False, params)

    def add_artist(self, artist):
        """Add an artist to this library.

        * artist: an Artist object or artist name string.
        """

        params = self._get_params()
        if type(artist) == str:
            params["artist"] = artist
        else:
            params["artist"] = artist.get_name()

        self._request(self.ws_prefix + ".addArtist", False, params)

    def remove_artist(self, artist):
        """Remove an artist from this library.

        * artist: an Artist object or artist name string.
        """

        params = self._get_params()
        if type(artist) == str:
            params["artist"] = artist
        else:
            params["artist"] = artist.get_name()

        self._request(self.ws_prefix + ".removeArtist", False, params)

    def add_track(self, track):
        """Add a track to this library.

        * track: a Track object.
        """

        params = self._get_params()
        params["track"] = track.get_title()

        self._request(self.ws_prefix + ".addTrack", False, params)

    def get_albums(self, artist=None, limit=50, cacheable=True):
        """
        Returns a sequence of Album objects wrapped in LibraryItems.
        If no artist is specified, it will return all, sorted by decreasing
        play count.
        If limit==None it will return all (may take a while)
        """

        params = self._get_params()
        if artist:
            params["artist"] = artist

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getAlbums",
                cacheable,
                params):
            # First <name> is the album, second is the artist.
            name = _extract(node, "name")
            artist = _extract(node, "name", 1)
            playcount = _number(_extract(node, "playcount"))
            tagcount = _number(_extract(node, "tagcount"))

            seq.append(LibraryItem(
                Album(artist, name, self.network), playcount, tagcount))

        return seq

    def get_artists(self, limit=50, cacheable=True):
        """
        Returns a sequence of Artist objects wrapped in LibraryItems.
        if limit==None it will return all (may take a while)
        """

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getArtists",
                cacheable):
            name = _extract(node, "name")

            playcount = _number(_extract(node, "playcount"))
            tagcount = _number(_extract(node, "tagcount"))

            seq.append(LibraryItem(
                Artist(name, self.network), playcount, tagcount))

        return seq

    def get_tracks(self, artist=None, album=None, limit=50, cacheable=True):
        """
        Returns a sequence of Track objects wrapped in LibraryItems.
        If limit==None it will return all (may take a while)
        """

        params = self._get_params()
        if artist:
            params["artist"] = artist
        if album:
            params["album"] = album

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getTracks",
                cacheable,
                params):
            # First <name> is the track title, second is the artist.
            name = _extract(node, "name")
            artist = _extract(node, "name", 1)
            playcount = _number(_extract(node, "playcount"))
            tagcount = _number(_extract(node, "tagcount"))

            seq.append(LibraryItem(
                Track(artist, name, self.network), playcount, tagcount))

        return seq

    def remove_scrobble(self, artist, title, timestamp):
        """Remove a scrobble from a user's Last.fm library. Parameters:
        artist (Required) : The artist that composed the track
        title (Required) : The name of the track
        timestamp (Required) : The unix timestamp of the scrobble
            that you wish to remove
        """

        params = self._get_params()
        params["artist"] = artist
        params["track"] = title
        params["timestamp"] = timestamp

        self._request(self.ws_prefix + ".removeScrobble", False, params)
+
+
class Playlist(_BaseObject):
    """A Last.fm user playlist."""

    # Playlist id and owning User (set in __init__).
    id = None
    user = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, user, playlist_id, network):
        _BaseObject.__init__(self, network, "playlist")

        # Accept either a User object or a user name.
        if isinstance(user, User):
            self.user = user
        else:
            self.user = User(user, self.network)

        self.id = playlist_id

    @_string_output
    def __str__(self):
        return repr(self.user) + "'s playlist # " + repr(self.id)

    def _get_info_node(self):
        """
        Returns the node from user.getPlaylists where this playlist's info is.
        """

        doc = self._request("user.getPlaylists", True)

        # Linear scan of the user's playlists for a matching id; returns
        # None implicitly if no playlist matches.
        for node in doc.getElementsByTagName("playlist"):
            if _extract(node, "id") == str(self.get_id()):
                return node

    def _get_params(self):
        return {'user': self.user.get_name(), 'playlistID': self.get_id()}

    def get_id(self):
        """Returns the playlist ID."""

        return self.id

    def get_user(self):
        """Returns the owner user of this playlist."""

        return self.user

    def get_tracks(self):
        """Returns a list of the tracks on this user playlist."""

        # Playlist content is fetched via the lastfm:// XSPF mechanism.
        uri = _unicode('lastfm://playlist/%s') % self.get_id()

        return XSPF(uri, self.network).get_tracks()

    def add_track(self, track):
        """Adds a Track to this Playlist."""

        params = self._get_params()
        params['artist'] = track.get_artist().get_name()
        params['track'] = track.get_title()

        self._request('playlist.addTrack', False, params)

    def get_title(self):
        """Returns the title of this playlist."""

        return _extract(self._get_info_node(), "title")

    def get_creation_date(self):
        """Returns the creation date of this playlist."""

        return _extract(self._get_info_node(), "date")

    def get_size(self):
        """Returns the number of tracks in this playlist."""

        return _number(_extract(self._get_info_node(), "size"))

    def get_description(self):
        """Returns the description of this playlist."""

        return _extract(self._get_info_node(), "description")

    def get_duration(self):
        """Returns the duration of this playlist in milliseconds."""

        return _number(_extract(self._get_info_node(), "duration"))

    def is_streamable(self):
        """
        Returns True if the playlist is streamable.
        For a playlist to be streamable, it needs at least 45 tracks by 15
        different artists."""

        if _extract(self._get_info_node(), "streamable") == '1':
            return True
        else:
            return False

    def has_track(self, track):
        """Checks to see if track is already in the playlist.
        * track: Any Track object.
        """

        return track in self.get_tracks()

    def get_cover_image(self, size=COVER_EXTRA_LARGE):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_MEGA
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        # NOTE(review): Event.get_cover_image uses _extract_all(...)[size]
        # to index a list of image URLs; _extract here looks like it yields
        # a single value, so [size] may index into a string — verify whether
        # this should be _extract_all.
        return _extract(self._get_info_node(), "image")[size]

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the playlist on the network.
        * domain_name: The network's language domain. Possible values:
          o DOMAIN_ENGLISH
          o DOMAIN_GERMAN
          o DOMAIN_SPANISH
          o DOMAIN_FRENCH
          o DOMAIN_ITALIAN
          o DOMAIN_POLISH
          o DOMAIN_PORTUGUESE
          o DOMAIN_SWEDISH
          o DOMAIN_TURKISH
          o DOMAIN_RUSSIAN
          o DOMAIN_JAPANESE
          o DOMAIN_CHINESE
        """

        # Reuse the path component of the English URL in the localized one.
        english_url = _extract(self._get_info_node(), "url")
        appendix = english_url[english_url.rfind("/") + 1:]

        return self.network._get_url(domain_name, "playlist") % {
            'appendix': appendix, "user": self.get_user().get_name()}
+
+
class Tag(_BaseObject, _Chartable):
    """A Last.fm object tag."""

    # Tag name (set in __init__).
    name = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network):
        _BaseObject.__init__(self, network, 'tag')
        _Chartable.__init__(self, 'tag')

        self.name = name

    def __repr__(self):
        return "pylast.Tag(%s, %s)" % (repr(self.name), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name()

    def __eq__(self, other):
        # Tag names compare case-insensitively (both __eq__ and __ne__).
        return self.get_name().lower() == other.get_name().lower()

    def __ne__(self, other):
        return self.get_name().lower() != other.get_name().lower()

    def _get_params(self):
        return {self.ws_prefix: self.get_name()}

    def get_name(self, properly_capitalized=False):
        """Returns the name of the tag.

        * properly_capitalized: when True, refresh self.name with the
          capitalization reported by the service.
        """

        if properly_capitalized:
            self.name = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.name

    def get_similar(self):
        """Returns the tags similar to this one, ordered by similarity. """

        doc = self._request(self.ws_prefix + '.getSimilar', True)

        seq = []
        names = _extract_all(doc, 'name')
        for name in names:
            seq.append(Tag(name, self.network))

        return seq

    def get_top_albums(self, limit=None, cacheable=True):
        """Returns a list of the top albums."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(
            self.ws_prefix + '.getTopAlbums', cacheable, params)

        return _extract_top_albums(doc, self.network)

    def get_top_tracks(self, limit=None, cacheable=True):
        """Returns a list of the most played Tracks for this tag."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopTracks", "track", Track, params, cacheable)

    def get_top_artists(self, limit=None, cacheable=True):
        """Returns a sequence of the most played artists."""

        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(
            self.ws_prefix + '.getTopArtists', cacheable, params)

        return _extract_top_artists(doc, self.network)

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the tag page on the network.
        * domain_name: The network's language domain. Possible values:
          o DOMAIN_ENGLISH
          o DOMAIN_GERMAN
          o DOMAIN_SPANISH
          o DOMAIN_FRENCH
          o DOMAIN_ITALIAN
          o DOMAIN_POLISH
          o DOMAIN_PORTUGUESE
          o DOMAIN_SWEDISH
          o DOMAIN_TURKISH
          o DOMAIN_RUSSIAN
          o DOMAIN_JAPANESE
          o DOMAIN_CHINESE
        """

        name = _url_safe(self.get_name())

        return self.network._get_url(domain_name, "tag") % {'name': name}
+
+
class Track(_Opus):
    """A Last.fm track."""

    __hash__ = _Opus.__hash__

    def __init__(self, artist, title, network, username=None):
        super(Track, self).__init__(artist, title, network, "track", username)

    def get_correction(self):
        """Returns the corrected track name."""

        return _extract(
            self._request(self.ws_prefix + ".getCorrection"), "name")

    def get_duration(self):
        """Returns the track duration."""

        doc = self._request(self.ws_prefix + ".getInfo", True)

        return _number(_extract(doc, "duration"))

    def get_userloved(self):
        """Whether the user loved this track.

        Returns None when no username is associated with this Track.
        """

        if not self.username:
            return

        params = self._get_params()
        params['username'] = self.username

        doc = self._request(self.ws_prefix + ".getInfo", True, params)
        loved = _number(_extract(doc, "userloved"))
        return bool(loved)

    def is_streamable(self):
        """Returns True if the track is available at Last.fm."""

        doc = self._request(self.ws_prefix + ".getInfo", True)
        return _extract(doc, "streamable") == "1"

    def is_fulltrack_available(self):
        """Returns True if the fulltrack is available for streaming."""

        doc = self._request(self.ws_prefix + ".getInfo", True)
        # The flag lives in the "fulltrack" attribute of <streamable>.
        return doc.getElementsByTagName(
            "streamable")[0].getAttribute("fulltrack") == "1"

    def get_album(self):
        """Returns the album object of this track.

        Returns None when the response carries no album information.
        """

        doc = self._request(self.ws_prefix + ".getInfo", True)

        albums = doc.getElementsByTagName("album")

        if len(albums) == 0:
            return

        node = doc.getElementsByTagName("album")[0]
        return Album(
            _extract(node, "artist"), _extract(node, "title"), self.network)

    def love(self):
        """Adds the track to the user's loved tracks. """

        self._request(self.ws_prefix + '.love')

    def unlove(self):
        """Remove the track to the user's loved tracks. """

        self._request(self.ws_prefix + '.unlove')

    def ban(self):
        """Ban this track from ever playing on the radio. """

        self._request(self.ws_prefix + '.ban')

    def get_similar(self):
        """
        Returns similar tracks for this track on the network,
        based on listening data.
        """

        doc = self._request(self.ws_prefix + '.getSimilar', True)

        seq = []
        for node in doc.getElementsByTagName(self.ws_prefix):
            # First <name> is the track title, second is the artist.
            title = _extract(node, 'name')
            artist = _extract(node, 'name', 1)
            match = _number(_extract(node, "match"))

            seq.append(SimilarItem(Track(artist, title, self.network), match))

        return seq

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the URL of the album or track page on the network.
        # Parameters:
        * domain_name str: The network's language domain. Possible values:
          o DOMAIN_ENGLISH
          o DOMAIN_GERMAN
          o DOMAIN_SPANISH
          o DOMAIN_FRENCH
          o DOMAIN_ITALIAN
          o DOMAIN_POLISH
          o DOMAIN_PORTUGUESE
          o DOMAIN_SWEDISH
          o DOMAIN_TURKISH
          o DOMAIN_RUSSIAN
          o DOMAIN_JAPANESE
          o DOMAIN_CHINESE
        """

        artist = _url_safe(self.get_artist().get_name())
        title = _url_safe(self.get_title())

        return self.network._get_url(
            domain_name, self.ws_prefix) % {
                'artist': artist, 'title': title}
+
+
class Group(_BaseObject, _Chartable):
    """A Last.fm group."""

    # Group name (set in __init__).
    name = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network):
        _BaseObject.__init__(self, network, 'group')
        _Chartable.__init__(self, 'group')

        self.name = name

    def __repr__(self):
        return "pylast.Group(%s, %s)" % (repr(self.name), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name()

    def __eq__(self, other):
        # Group names compare case-insensitively.
        return self.get_name().lower() == other.get_name().lower()

    def __ne__(self, other):
        # Fixed: mirror __eq__'s case-insensitive comparison.  Previously
        # this compared case-sensitively, so two names differing only in
        # case satisfied both __eq__ and __ne__.
        return not self.__eq__(other)

    def _get_params(self):
        return {self.ws_prefix: self.get_name()}

    def get_name(self):
        """Returns the group name. """
        return self.name

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the group page on the network.
        * domain_name: The network's language domain. Possible values:
          o DOMAIN_ENGLISH
          o DOMAIN_GERMAN
          o DOMAIN_SPANISH
          o DOMAIN_FRENCH
          o DOMAIN_ITALIAN
          o DOMAIN_POLISH
          o DOMAIN_PORTUGUESE
          o DOMAIN_SWEDISH
          o DOMAIN_TURKISH
          o DOMAIN_RUSSIAN
          o DOMAIN_JAPANESE
          o DOMAIN_CHINESE
        """

        name = _url_safe(self.get_name())

        return self.network._get_url(domain_name, "group") % {'name': name}

    def get_members(self, limit=50, cacheable=False):
        """
        Returns a sequence of User objects
        if limit==None it will return all
        """

        nodes = _collect_nodes(
            limit, self, self.ws_prefix + ".getMembers", cacheable)

        users = []

        for node in nodes:
            users.append(User(_extract(node, "name"), self.network))

        return users
+
+
class XSPF(_BaseObject):
    """A Last.fm XSPF playlist."""

    # lastfm:// playlist URI (set in __init__).
    uri = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, uri, network):
        _BaseObject.__init__(self, network, None)

        self.uri = uri

    def _get_params(self):
        return {'playlistURL': self.get_uri()}

    @_string_output
    def __str__(self):
        return self.get_uri()

    def __eq__(self, other):
        return self.get_uri() == other.get_uri()

    def __ne__(self, other):
        return self.get_uri() != other.get_uri()

    def get_uri(self):
        """Returns the Last.fm playlist URI. """

        return self.uri

    def get_tracks(self):
        """Returns the tracks on this playlist."""

        doc = self._request('playlist.fetch', True)

        seq = []
        for node in doc.getElementsByTagName('track'):
            # XSPF uses <title>/<creator> rather than <name>/<artist>.
            title = _extract(node, 'title')
            artist = _extract(node, 'creator')

            seq.append(Track(artist, title, self.network))

        return seq
+
+
+class User(_BaseObject, _Chartable):
+ """A Last.fm user."""
+
+ name = None
+
+ __hash__ = _BaseObject.__hash__
+
    def __init__(self, user_name, network):
        _BaseObject.__init__(self, network, 'user')
        _Chartable.__init__(self, 'user')

        self.name = user_name

        # Pagination indices — not consulted in the code visible here;
        # presumably used by paging helpers elsewhere. TODO confirm.
        self._past_events_index = 0
        self._recommended_events_index = 0
        self._recommended_artists_index = 0
+
    def __repr__(self):
        # Debug representation: constructor-style, includes the network.
        return "pylast.User(%s, %s)" % (repr(self.name), repr(self.network))
+
    @_string_output
    def __str__(self):
        # The user's display form is simply the name.
        return self.get_name()
+
    def __eq__(self, another):
        # Users compare by name, case-sensitively; non-User objects never
        # compare equal.
        if isinstance(another, User):
            return self.get_name() == another.get_name()
        else:
            return False
+
    def __ne__(self, another):
        # Complement of __eq__: non-User objects are always unequal.
        if isinstance(another, User):
            return self.get_name() != another.get_name()
        else:
            return True
+
    def _get_params(self):
        # Base web-service parameters for user.* calls.
        return {self.ws_prefix: self.get_name()}
+
    def get_name(self, properly_capitalized=False):
        """Returns the user name.

        * properly_capitalized: when True, refresh self.name with the
          capitalization reported by the service.
        """

        if properly_capitalized:
            self.name = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.name
+
    def get_upcoming_events(self):
        """Returns all the upcoming events for this user."""

        doc = self._request(self.ws_prefix + '.getEvents', True)

        return _extract_events_from_doc(doc, self.network)
+
    def get_artist_tracks(self, artist, cacheable=False):
        """
        Get a list of tracks by a given artist scrobbled by this user,
        including scrobble time.

        * artist: artist name string.
        Returns a list of PlayedTrack namedtuple-style items.
        """
        # Not implemented:
        # "Can be limited to specific timeranges, defaults to all time."

        params = self._get_params()
        params['artist'] = artist

        seq = []
        # limit=None: collect every page of results.
        for track in _collect_nodes(
                None,
                self,
                self.ws_prefix + ".getArtistTracks",
                cacheable,
                params):
            title = _extract(track, "name")
            artist = _extract(track, "artist")
            date = _extract(track, "date")
            album = _extract(track, "album")
            # Unix timestamp comes from the "uts" attribute of <date>.
            timestamp = track.getElementsByTagName(
                "date")[0].getAttribute("uts")

            seq.append(PlayedTrack(
                Track(artist, title, self.network), album, date, timestamp))

        return seq
+
    def get_friends(self, limit=50, cacheable=False):
        """Returns a list of the user's friends. """

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getFriends",
                cacheable):
            seq.append(User(_extract(node, "name"), self.network))

        return seq
+
    def get_loved_tracks(self, limit=50, cacheable=True):
        """
        Returns this user's loved track as a sequence of LovedTrack objects in
        reverse order of their timestamp, all the way back to the first track.

        If limit==None, it will try to pull all the available data.

        This method uses caching. Enable caching only if you're pulling a
        large amount of data.

        Use extract_items() with the return of this function to
        get only a sequence of Track objects with no playback dates.
        """

        params = self._get_params()
        if limit:
            params['limit'] = limit

        seq = []
        for track in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getLovedTracks",
                cacheable,
                params):
            # First <name> is the track title, second is the artist.
            title = _extract(track, "name")
            artist = _extract(track, "name", 1)
            date = _extract(track, "date")
            # Unix timestamp comes from the "uts" attribute of <date>.
            timestamp = track.getElementsByTagName(
                "date")[0].getAttribute("uts")

            seq.append(LovedTrack(
                Track(artist, title, self.network), date, timestamp))

        return seq
+
    def get_neighbours(self, limit=50, cacheable=True):
        """Returns a list of the user's neighbours (users with similar
        taste)."""

        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(
            self.ws_prefix + '.getNeighbours', cacheable, params)

        seq = []
        names = _extract_all(doc, 'name')

        for name in names:
            seq.append(User(name, self.network))

        return seq
+
    def get_past_events(self, limit=50, cacheable=False):
        """
        Returns a sequence of Event objects
        if limit==None it will return all
        """

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getPastEvents",
                cacheable):
            seq.append(Event(_extract(node, "id"), self.network))

        return seq
+
    def get_playlists(self):
        """Returns a list of Playlists that this user owns."""

        doc = self._request(self.ws_prefix + ".getPlaylists", True)

        playlists = []
        for playlist_id in _extract_all(doc, "id"):
            playlists.append(
                Playlist(self.get_name(), playlist_id, self.network))

        return playlists
+
+ def get_now_playing(self):
+ """
+ Returns the currently playing track, or None if nothing is playing.
+ """
+
+ params = self._get_params()
+ params['limit'] = '1'
+
+ doc = self._request(self.ws_prefix + '.getRecentTracks', False, params)
+
+ tracks = doc.getElementsByTagName('track')
+
+ if len(tracks) == 0:
+ return None
+
+ e = tracks[0]
+
+ if not e.hasAttribute('nowplaying'):
+ return None
+
+ artist = _extract(e, 'artist')
+ title = _extract(e, 'name')
+
+ return Track(artist, title, self.network, self.name)
+
+ def get_recent_tracks(self, limit=10, cacheable=True,
+ time_from=None, time_to=None):
+ """
+ Returns this user's played tracks as a sequence of PlayedTrack objects
+ in reverse order of playtime, all the way back to the first track.
+
+ Parameters:
+ limit : If None, it will try to pull all the available data.
+ from (Optional) : Beginning timestamp of a range - only display
+ scrobbles after this time, in UNIX timestamp format (integer
+ number of seconds since 00:00:00, January 1st 1970 UTC). This
+ must be in the UTC time zone.
+ to (Optional) : End timestamp of a range - only display scrobbles
+ before this time, in UNIX timestamp format (integer number of
+ seconds since 00:00:00, January 1st 1970 UTC). This must be in
+ the UTC time zone.
+
+ This method uses caching. Enable caching only if you're pulling a
+ large amount of data.
+
+ Use extract_items() with the return of this function to
+ get only a sequence of Track objects with no playback dates.
+ """
+
+ params = self._get_params()
+ if limit:
+ params['limit'] = limit
+ if time_from:
+ params['from'] = time_from
+ if time_to:
+ params['to'] = time_to
+
+ seq = []
+ for track in _collect_nodes(
+ limit,
+ self,
+ self.ws_prefix + ".getRecentTracks",
+ cacheable,
+ params):
+
+ if track.hasAttribute('nowplaying'):
+ continue # to prevent the now playing track from sneaking in
+
+ title = _extract(track, "name")
+ artist = _extract(track, "artist")
+ date = _extract(track, "date")
+ album = _extract(track, "album")
+ timestamp = track.getElementsByTagName(
+ "date")[0].getAttribute("uts")
+
+ seq.append(PlayedTrack(
+ Track(artist, title, self.network), album, date, timestamp))
+
+ return seq
+
+ def get_id(self):
+ """Returns the user ID."""
+
+ doc = self._request(self.ws_prefix + ".getInfo", True)
+
+ return _extract(doc, "id")
+
+ def get_language(self):
+ """Returns the language code of the language used by the user."""
+
+ doc = self._request(self.ws_prefix + ".getInfo", True)
+
+ return _extract(doc, "lang")
+
+ def get_country(self):
+ """Returns the name of the country of the user."""
+
+ doc = self._request(self.ws_prefix + ".getInfo", True)
+
+ country = _extract(doc, "country")
+
+ if country is None:
+ return None
+ else:
+ return Country(country, self.network)
+
+ def get_age(self):
+ """Returns the user's age."""
+
+ doc = self._request(self.ws_prefix + ".getInfo", True)
+
+ return _number(_extract(doc, "age"))
+
+ def get_gender(self):
+ """Returns the user's gender. Either USER_MALE or USER_FEMALE."""
+
+ doc = self._request(self.ws_prefix + ".getInfo", True)
+
+ value = _extract(doc, "gender")
+
+ if value == 'm':
+ return USER_MALE
+ elif value == 'f':
+ return USER_FEMALE
+
+ return None
+
+ def is_subscriber(self):
+ """Returns whether the user is a subscriber or not. True or False."""
+
+ doc = self._request(self.ws_prefix + ".getInfo", True)
+
+ return _extract(doc, "subscriber") == "1"
+
+ def get_playcount(self):
+ """Returns the user's playcount so far."""
+
+ doc = self._request(self.ws_prefix + ".getInfo", True)
+
+ return _number(_extract(doc, "playcount"))
+
+ def get_registered(self):
+ """Returns the user's registration date."""
+
+ doc = self._request(self.ws_prefix + ".getInfo", True)
+
+ return _extract(doc, "registered")
+
+ def get_unixtime_registered(self):
+ """Returns the user's registration date as a UNIX timestamp."""
+
+ doc = self._request(self.ws_prefix + ".getInfo", True)
+
+ return doc.getElementsByTagName(
+ "registered")[0].getAttribute("unixtime")
+
+ def get_tagged_albums(self, tag, limit=None, cacheable=True):
+ """Returns the albums tagged by a user."""
+
+ params = self._get_params()
+ params['tag'] = tag
+ params['taggingtype'] = 'album'
+ if limit:
+ params['limit'] = limit
+ doc = self._request(self.ws_prefix + '.getpersonaltags', cacheable,
+ params)
+ return _extract_albums(doc, self.network)
+
+ def get_tagged_artists(self, tag, limit=None):
+ """Returns the artists tagged by a user."""
+
+ params = self._get_params()
+ params['tag'] = tag
+ params['taggingtype'] = 'artist'
+ if limit:
+ params["limit"] = limit
+ doc = self._request(self.ws_prefix + '.getpersonaltags', True, params)
+ return _extract_artists(doc, self.network)
+
+ def get_tagged_tracks(self, tag, limit=None, cacheable=True):
+ """Returns the tracks tagged by a user."""
+
+ params = self._get_params()
+ params['tag'] = tag
+ params['taggingtype'] = 'track'
+ if limit:
+ params['limit'] = limit
+ doc = self._request(self.ws_prefix + '.getpersonaltags', cacheable,
+ params)
+ return _extract_tracks(doc, self.network)
+
+ def get_top_albums(
+ self, period=PERIOD_OVERALL, limit=None, cacheable=True):
+ """Returns the top albums played by a user.
+ * period: The period of time. Possible values:
+ o PERIOD_OVERALL
+ o PERIOD_7DAYS
+ o PERIOD_1MONTH
+ o PERIOD_3MONTHS
+ o PERIOD_6MONTHS
+ o PERIOD_12MONTHS
+ """
+
+ params = self._get_params()
+ params['period'] = period
+ if limit:
+ params['limit'] = limit
+
+ doc = self._request(
+ self.ws_prefix + '.getTopAlbums', cacheable, params)
+
+ return _extract_top_albums(doc, self.network)
+
+ def get_top_artists(self, period=PERIOD_OVERALL, limit=None):
+ """Returns the top artists played by a user.
+ * period: The period of time. Possible values:
+ o PERIOD_OVERALL
+ o PERIOD_7DAYS
+ o PERIOD_1MONTH
+ o PERIOD_3MONTHS
+ o PERIOD_6MONTHS
+ o PERIOD_12MONTHS
+ """
+
+ params = self._get_params()
+ params['period'] = period
+ if limit:
+ params["limit"] = limit
+
+ doc = self._request(self.ws_prefix + '.getTopArtists', True, params)
+
+ return _extract_top_artists(doc, self.network)
+
+ def get_top_tags(self, limit=None, cacheable=True):
+ """
+ Returns a sequence of the top tags used by this user with their counts
+ as TopItem objects.
+ * limit: The limit of how many tags to return.
+ * cacheable: Whether to cache results.
+ """
+
+ params = self._get_params()
+ if limit:
+ params["limit"] = limit
+
+ doc = self._request(self.ws_prefix + ".getTopTags", cacheable, params)
+
+ seq = []
+ for node in doc.getElementsByTagName("tag"):
+ seq.append(TopItem(
+ Tag(_extract(node, "name"), self.network),
+ _extract(node, "count")))
+
+ return seq
+
+ def get_top_tracks(
+ self, period=PERIOD_OVERALL, limit=None, cacheable=True):
+ """Returns the top tracks played by a user.
+ * period: The period of time. Possible values:
+ o PERIOD_OVERALL
+ o PERIOD_7DAYS
+ o PERIOD_1MONTH
+ o PERIOD_3MONTHS
+ o PERIOD_6MONTHS
+ o PERIOD_12MONTHS
+ """
+
+ params = self._get_params()
+ params['period'] = period
+ if limit:
+ params['limit'] = limit
+
+ return self._get_things(
+ "getTopTracks", "track", Track, params, cacheable)
+
+ def compare_with_user(self, user, shared_artists_limit=None):
+ """
+ Compare this user with another Last.fm user.
+ Returns a sequence:
+ (tasteometer_score, (shared_artist1, shared_artist2, ...))
+ user: A User object or a username string/unicode object.
+ """
+
+ if isinstance(user, User):
+ user = user.get_name()
+
+ params = self._get_params()
+ if shared_artists_limit:
+ params['limit'] = shared_artists_limit
+ params['type1'] = 'user'
+ params['type2'] = 'user'
+ params['value1'] = self.get_name()
+ params['value2'] = user
+
+ doc = self._request('tasteometer.compare', False, params)
+
+ score = _extract(doc, 'score')
+
+ artists = doc.getElementsByTagName('artists')[0]
+ shared_artists_names = _extract_all(artists, 'name')
+
+ shared_artists_seq = []
+
+ for name in shared_artists_names:
+ shared_artists_seq.append(Artist(name, self.network))
+
+ return (score, shared_artists_seq)
+
+ def get_image(self):
+ """Returns the user's avatar."""
+
+ doc = self._request(self.ws_prefix + ".getInfo", True)
+
+ return _extract(doc, "image")
+
+ def get_url(self, domain_name=DOMAIN_ENGLISH):
+ """Returns the url of the user page on the network.
+ * domain_name: The network's language domain. Possible values:
+ o DOMAIN_ENGLISH
+ o DOMAIN_GERMAN
+ o DOMAIN_SPANISH
+ o DOMAIN_FRENCH
+ o DOMAIN_ITALIAN
+ o DOMAIN_POLISH
+ o DOMAIN_PORTUGUESE
+ o DOMAIN_SWEDISH
+ o DOMAIN_TURKISH
+ o DOMAIN_RUSSIAN
+ o DOMAIN_JAPANESE
+ o DOMAIN_CHINESE
+ """
+
+ name = _url_safe(self.get_name())
+
+ return self.network._get_url(domain_name, "user") % {'name': name}
+
+ def get_library(self):
+ """Returns the associated Library object. """
+
+ return Library(self, self.network)
+
+ def shout(self, message):
+ """
+ Post a shout
+ """
+
+ params = self._get_params()
+ params["message"] = message
+
+ self._request(self.ws_prefix + ".Shout", False, params)
+
+
+class AuthenticatedUser(User):
+ def __init__(self, network):
+ User.__init__(self, "", network)
+
+ def _get_params(self):
+ return {"user": self.get_name()}
+
+ def get_name(self):
+ """Returns the name of the authenticated user."""
+
+ doc = self._request("user.getInfo", True, {"user": ""}) # hack
+
+ self.name = _extract(doc, "name")
+ return self.name
+
+ def get_recommended_events(self, limit=50, cacheable=False):
+ """
+ Returns a sequence of Event objects
+ if limit==None it will return all
+ """
+
+ seq = []
+ for node in _collect_nodes(
+ limit, self, "user.getRecommendedEvents", cacheable):
+ seq.append(Event(_extract(node, "id"), self.network))
+
+ return seq
+
+ def get_recommended_artists(self, limit=50, cacheable=False):
+ """
+ Returns a sequence of Artist objects
+ if limit==None it will return all
+ """
+
+ seq = []
+ for node in _collect_nodes(
+ limit, self, "user.getRecommendedArtists", cacheable):
+ seq.append(Artist(_extract(node, "name"), self.network))
+
+ return seq
+
+
+class _Search(_BaseObject):
+ """An abstract class. Use one of its derivatives."""
+
+ def __init__(self, ws_prefix, search_terms, network):
+ _BaseObject.__init__(self, network, ws_prefix)
+
+ self._ws_prefix = ws_prefix
+ self.search_terms = search_terms
+
+ self._last_page_index = 0
+
+ def _get_params(self):
+ params = {}
+
+ for key in self.search_terms.keys():
+ params[key] = self.search_terms[key]
+
+ return params
+
+ def get_total_result_count(self):
+ """Returns the total count of all the results."""
+
+ doc = self._request(self._ws_prefix + ".search", True)
+
+ return _extract(doc, "opensearch:totalResults")
+
+ def _retrieve_page(self, page_index):
+ """Returns the node of matches to be processed"""
+
+ params = self._get_params()
+ params["page"] = str(page_index)
+ doc = self._request(self._ws_prefix + ".search", True, params)
+
+ return doc.getElementsByTagName(self._ws_prefix + "matches")[0]
+
+ def _retrieve_next_page(self):
+ self._last_page_index += 1
+ return self._retrieve_page(self._last_page_index)
+
+
+class AlbumSearch(_Search):
+ """Search for an album by name."""
+
+ def __init__(self, album_name, network):
+
+ _Search.__init__(self, "album", {"album": album_name}, network)
+
+ def get_next_page(self):
+ """Returns the next page of results as a sequence of Album objects."""
+
+ master_node = self._retrieve_next_page()
+
+ seq = []
+ for node in master_node.getElementsByTagName("album"):
+ seq.append(Album(
+ _extract(node, "artist"),
+ _extract(node, "name"),
+ self.network))
+
+ return seq
+
+
+class ArtistSearch(_Search):
+ """Search for an artist by artist name."""
+
+ def __init__(self, artist_name, network):
+ _Search.__init__(self, "artist", {"artist": artist_name}, network)
+
+ def get_next_page(self):
+ """Returns the next page of results as a sequence of Artist objects."""
+
+ master_node = self._retrieve_next_page()
+
+ seq = []
+ for node in master_node.getElementsByTagName("artist"):
+ artist = Artist(_extract(node, "name"), self.network)
+ artist.listener_count = _number(_extract(node, "listeners"))
+ seq.append(artist)
+
+ return seq
+
+
+class TagSearch(_Search):
+ """Search for a tag by tag name."""
+
+ def __init__(self, tag_name, network):
+
+ _Search.__init__(self, "tag", {"tag": tag_name}, network)
+
+ def get_next_page(self):
+ """Returns the next page of results as a sequence of Tag objects."""
+
+ master_node = self._retrieve_next_page()
+
+ seq = []
+ for node in master_node.getElementsByTagName("tag"):
+ tag = Tag(_extract(node, "name"), self.network)
+ tag.tag_count = _number(_extract(node, "count"))
+ seq.append(tag)
+
+ return seq
+
+
+class TrackSearch(_Search):
+ """
+ Search for a track by track title. If you don't want to narrow the results
+ down by specifying the artist name, set it to empty string.
+ """
+
+ def __init__(self, artist_name, track_title, network):
+
+ _Search.__init__(
+ self,
+ "track",
+ {"track": track_title, "artist": artist_name},
+ network)
+
+ def get_next_page(self):
+ """Returns the next page of results as a sequence of Track objects."""
+
+ master_node = self._retrieve_next_page()
+
+ seq = []
+ for node in master_node.getElementsByTagName("track"):
+ track = Track(
+ _extract(node, "artist"),
+ _extract(node, "name"),
+ self.network)
+ track.listener_count = _number(_extract(node, "listeners"))
+ seq.append(track)
+
+ return seq
+
+
+class VenueSearch(_Search):
+ """
+ Search for a venue by its name. If you don't want to narrow the results
+ down by specifying a country, set it to empty string.
+ """
+
+ def __init__(self, venue_name, country_name, network):
+
+ _Search.__init__(
+ self,
+ "venue",
+ {"venue": venue_name, "country": country_name},
+ network)
+
+ def get_next_page(self):
+ """Returns the next page of results as a sequence of Venue objects."""
+
+ master_node = self._retrieve_next_page()
+
+ seq = []
+ for node in master_node.getElementsByTagName("venue"):
+ seq.append(Venue(_extract(node, "id"), self.network))
+
+ return seq
+
+
+class Venue(_BaseObject):
+ """A venue where events are held."""
+
+ # TODO: waiting for a venue.getInfo web service to use.
+ # TODO: As an intermediate use case, can pass the venue DOM element when
+ # using Event.get_venue() to populate the venue info, if the venue.getInfo
+ # API call becomes available this workaround should be removed
+
+ id = None
+ info = None
+ name = None
+ location = None
+ url = None
+
+ __hash__ = _BaseObject.__hash__
+
+ def __init__(self, netword_id, network, venue_element=None):
+ _BaseObject.__init__(self, network, "venue")
+
+ self.id = _number(netword_id)
+ if venue_element is not None:
+ self.info = _extract_element_tree(venue_element)
+ self.name = self.info.get('name')
+ self.url = self.info.get('url')
+ self.location = self.info.get('location')
+
+ def __repr__(self):
+ return "pylast.Venue(%s, %s)" % (repr(self.id), repr(self.network))
+
+ @_string_output
+ def __str__(self):
+ return "Venue #" + str(self.id)
+
+ def __eq__(self, other):
+ return self.get_id() == other.get_id()
+
+ def _get_params(self):
+ return {self.ws_prefix: self.get_id()}
+
+ def get_id(self):
+ """Returns the id of the venue."""
+
+ return self.id
+
+ def get_name(self):
+ """Returns the name of the venue."""
+
+ return self.name
+
+ def get_url(self):
+ """Returns the URL of the venue page."""
+
+ return self.url
+
+ def get_location(self):
+ """Returns the location of the venue (dictionary)."""
+
+ return self.location
+
+ def get_upcoming_events(self):
+ """Returns the upcoming events in this venue."""
+
+ doc = self._request(self.ws_prefix + ".getEvents", True)
+
+ return _extract_events_from_doc(doc, self.network)
+
+ def get_past_events(self):
+ """Returns the past events held in this venue."""
+
+ doc = self._request(self.ws_prefix + ".getEvents", True)
+
+ return _extract_events_from_doc(doc, self.network)
+
+
+def md5(text):
+ """Returns the md5 hash of a string."""
+
+ h = hashlib.md5()
+ h.update(_unicode(text).encode("utf-8"))
+
+ return h.hexdigest()
+
+
+def _unicode(text):
+ if isinstance(text, six.binary_type):
+ return six.text_type(text, "utf-8")
+ elif isinstance(text, six.text_type):
+ return text
+ else:
+ return six.text_type(text)
+
+
+def _string(string):
+ """For Python2 routines that can only process str type."""
+ if isinstance(string, str):
+ return string
+ casted = six.text_type(string)
+ if sys.version_info[0] == 2:
+ casted = casted.encode("utf-8")
+ return casted
+
+
+def cleanup_nodes(doc):
+ """
+ Remove text nodes containing only whitespace
+ """
+ for node in doc.documentElement.childNodes:
+ if node.nodeType == Node.TEXT_NODE and node.nodeValue.isspace():
+ doc.documentElement.removeChild(node)
+ return doc
+
+
+def _collect_nodes(limit, sender, method_name, cacheable, params=None):
+ """
+ Returns a sequence of dom.Node objects about as close to limit as possible
+ """
+
+ if not params:
+ params = sender._get_params()
+
+ nodes = []
+ page = 1
+ end_of_pages = False
+
+ while not end_of_pages and (not limit or (limit and len(nodes) < limit)):
+ params["page"] = str(page)
+ doc = sender._request(method_name, cacheable, params)
+ doc = cleanup_nodes(doc)
+
+ main = doc.documentElement.childNodes[0]
+
+ if main.hasAttribute("totalPages"):
+ total_pages = _number(main.getAttribute("totalPages"))
+ elif main.hasAttribute("totalpages"):
+ total_pages = _number(main.getAttribute("totalpages"))
+ else:
+ raise Exception("No total pages attribute")
+
+ for node in main.childNodes:
+ if not node.nodeType == xml.dom.Node.TEXT_NODE and (
+ not limit or (len(nodes) < limit)):
+ nodes.append(node)
+
+ if page >= total_pages:
+ end_of_pages = True
+
+ page += 1
+
+ return nodes
+
+
+def _extract(node, name, index=0):
+ """Extracts a value from the xml string"""
+
+ nodes = node.getElementsByTagName(name)
+
+ if len(nodes):
+ if nodes[index].firstChild:
+ return _unescape_htmlentity(nodes[index].firstChild.data.strip())
+ else:
+ return None
+
+
+def _extract_element_tree(node):
+ """Extract an element tree into a multi-level dictionary
+
+ NB: If any elements have text nodes as well as nested
+ elements this will ignore the text nodes"""
+
+ def _recurse_build_tree(rootNode, targetDict):
+ """Recursively build a multi-level dict"""
+
+ def _has_child_elements(rootNode):
+ """Check if an element has any nested (child) elements"""
+
+ for node in rootNode.childNodes:
+ if node.nodeType == node.ELEMENT_NODE:
+ return True
+ return False
+
+ for node in rootNode.childNodes:
+ if node.nodeType == node.ELEMENT_NODE:
+ if _has_child_elements(node):
+ targetDict[node.tagName] = {}
+ _recurse_build_tree(node, targetDict[node.tagName])
+ else:
+ val = None if node.firstChild is None else \
+ _unescape_htmlentity(node.firstChild.data.strip())
+ targetDict[node.tagName] = val
+ return targetDict
+
+ return _recurse_build_tree(node, {})
+
+
+def _extract_all(node, name, limit_count=None):
+ """Extracts all the values from the xml string. returning a list."""
+
+ seq = []
+
+ for i in range(0, len(node.getElementsByTagName(name))):
+ if len(seq) == limit_count:
+ break
+
+ seq.append(_extract(node, name, i))
+
+ return seq
+
+
+def _extract_top_artists(doc, network):
+ # TODO Maybe include the _request here too?
+ seq = []
+ for node in doc.getElementsByTagName("artist"):
+ name = _extract(node, "name")
+ playcount = _extract(node, "playcount")
+
+ seq.append(TopItem(Artist(name, network), playcount))
+
+ return seq
+
+
+def _extract_top_albums(doc, network):
+ # TODO Maybe include the _request here too?
+ seq = []
+ for node in doc.getElementsByTagName("album"):
+ name = _extract(node, "name")
+ artist = _extract(node, "name", 1)
+ playcount = _extract(node, "playcount")
+
+ seq.append(TopItem(Album(artist, name, network), playcount))
+
+ return seq
+
+
+def _extract_artists(doc, network):
+ seq = []
+ for node in doc.getElementsByTagName("artist"):
+ seq.append(Artist(_extract(node, "name"), network))
+ return seq
+
+
+def _extract_albums(doc, network):
+ seq = []
+ for node in doc.getElementsByTagName("album"):
+ name = _extract(node, "name")
+ artist = _extract(node, "name", 1)
+ seq.append(Album(artist, name, network))
+ return seq
+
+
+def _extract_tracks(doc, network):
+ seq = []
+ for node in doc.getElementsByTagName("track"):
+ name = _extract(node, "name")
+ artist = _extract(node, "name", 1)
+ seq.append(Track(artist, name, network))
+ return seq
+
+
+def _extract_events_from_doc(doc, network):
+ events = []
+ for node in doc.getElementsByTagName("event"):
+ events.append(Event(_extract(node, "id"), network))
+ return events
+
+
+def _url_safe(text):
+ """Does all kinds of tricks on a text to make it safe to use in a url."""
+
+ return url_quote_plus(url_quote_plus(_string(text))).lower()
+
+
+def _number(string):
+ """
+ Extracts an int from a string.
+ Returns a 0 if None or an empty string was passed.
+ """
+
+ if not string:
+ return 0
+ elif string == "":
+ return 0
+ else:
+ try:
+ return int(string)
+ except ValueError:
+ return float(string)
+
+
+def _unescape_htmlentity(string):
+
+ # string = _unicode(string)
+
+ mapping = htmlentitydefs.name2codepoint
+ for key in mapping:
+ string = string.replace("&%s;" % key, unichr(mapping[key]))
+
+ return string
+
+
+def extract_items(topitems_or_libraryitems):
+ """
+ Extracts a sequence of items from a sequence of TopItem or
+ LibraryItem objects.
+ """
+
+ seq = []
+ for i in topitems_or_libraryitems:
+ seq.append(i.item)
+
+ return seq
+
+
+class ScrobblingError(Exception):
+ def __init__(self, message):
+ Exception.__init__(self)
+ self.message = message
+
+ @_string_output
+ def __str__(self):
+ return self.message
+
+
+class BannedClientError(ScrobblingError):
+ def __init__(self):
+ ScrobblingError.__init__(
+ self, "This version of the client has been banned")
+
+
+class BadAuthenticationError(ScrobblingError):
+ def __init__(self):
+ ScrobblingError.__init__(self, "Bad authentication token")
+
+
+class BadTimeError(ScrobblingError):
+ def __init__(self):
+ ScrobblingError.__init__(
+ self, "Time provided is not close enough to current time")
+
+
+class BadSessionError(ScrobblingError):
+ def __init__(self):
+ ScrobblingError.__init__(
+ self, "Bad session id, consider re-handshaking")
+
+
+class _ScrobblerRequest(object):
+
+ def __init__(self, url, params, network, request_type="POST"):
+
+ for key in params:
+ params[key] = str(params[key])
+
+ self.params = params
+ self.type = request_type
+ (self.hostname, self.subdir) = url_split_host(url[len("http:"):])
+ self.network = network
+
+ def execute(self):
+ """Returns a string response of this request."""
+
+ if _can_use_ssl_securely():
+ connection = HTTPSConnection(
+ context=SSL_CONTEXT,
+ host=self.hostname
+ )
+ else:
+ connection = HTTPConnection(
+ host=self.hostname
+ )
+
+ data = []
+ for name in self.params.keys():
+ value = url_quote_plus(self.params[name])
+ data.append('='.join((name, value)))
+ data = "&".join(data)
+
+ headers = {
+ "Content-type": "application/x-www-form-urlencoded",
+ "Accept-Charset": "utf-8",
+ "User-Agent": "pylast" + "/" + __version__,
+ "HOST": self.hostname
+ }
+
+ if self.type == "GET":
+ connection.request(
+ "GET", self.subdir + "?" + data, headers=headers)
+ else:
+ connection.request("POST", self.subdir, data, headers)
+ response = _unicode(connection.getresponse().read())
+
+ self._check_response_for_errors(response)
+
+ return response
+
+ def _check_response_for_errors(self, response):
+ """
+ When passed a string response it checks for errors, raising any
+ exceptions as necessary.
+ """
+
+ lines = response.split("\n")
+ status_line = lines[0]
+
+ if status_line == "OK":
+ return
+ elif status_line == "BANNED":
+ raise BannedClientError()
+ elif status_line == "BADAUTH":
+ raise BadAuthenticationError()
+ elif status_line == "BADTIME":
+ raise BadTimeError()
+ elif status_line == "BADSESSION":
+ raise BadSessionError()
+ elif status_line.startswith("FAILED "):
+ reason = status_line[status_line.find("FAILED ") + len("FAILED "):]
+ raise ScrobblingError(reason)
+
+
+class Scrobbler(object):
+ """A class for scrobbling tracks to Last.fm"""
+
+ session_id = None
+ nowplaying_url = None
+ submissions_url = None
+
+ def __init__(self, network, client_id, client_version):
+ self.client_id = client_id
+ self.client_version = client_version
+ self.username = network.username
+ self.password = network.password_hash
+ self.network = network
+
+ def _do_handshake(self):
+ """Handshakes with the server"""
+
+ timestamp = str(int(time.time()))
+
+ if self.password and self.username:
+ token = md5(self.password + timestamp)
+ elif self.network.api_key and self.network.api_secret and \
+ self.network.session_key:
+ if not self.username:
+ self.username = self.network.get_authenticated_user()\
+ .get_name()
+ token = md5(self.network.api_secret + timestamp)
+
+ params = {
+ "hs": "true", "p": "1.2.1", "c": self.client_id,
+ "v": self.client_version, "u": self.username, "t": timestamp,
+ "a": token}
+
+ if self.network.session_key and self.network.api_key:
+ params["sk"] = self.network.session_key
+ params["api_key"] = self.network.api_key
+
+ server = self.network.submission_server
+ response = _ScrobblerRequest(
+ server, params, self.network, "GET").execute().split("\n")
+
+ self.session_id = response[1]
+ self.nowplaying_url = response[2]
+ self.submissions_url = response[3]
+
+ def _get_session_id(self, new=False):
+ """
+ Returns a handshake. If new is true, then it will be requested from
+ the server even if one was cached.
+ """
+
+ if not self.session_id or new:
+ self._do_handshake()
+
+ return self.session_id
+
+ def report_now_playing(
+ self, artist, title, album="", duration="", track_number="",
+ mbid=""):
+
+ _deprecation_warning(
+ "DeprecationWarning: Use Network.update_now_playing(...) instead")
+
+ params = {
+ "s": self._get_session_id(), "a": artist, "t": title,
+ "b": album, "l": duration, "n": track_number, "m": mbid}
+
+ try:
+ _ScrobblerRequest(
+ self.nowplaying_url, params, self.network
+ ).execute()
+ except BadSessionError:
+ self._do_handshake()
+ self.report_now_playing(
+ artist, title, album, duration, track_number, mbid)
+
+ def scrobble(
+ self, artist, title, time_started, source, mode, duration,
+ album="", track_number="", mbid=""):
+ """Scrobble a track. parameters:
+ artist: Artist name.
+ title: Track title.
+ time_started: UTC timestamp of when the track started playing.
+ source: The source of the track
+ SCROBBLE_SOURCE_USER: Chosen by the user
+ (the most common value, unless you have a reason for
+ choosing otherwise, use this).
+ SCROBBLE_SOURCE_NON_PERSONALIZED_BROADCAST: Non-personalised
+ broadcast (e.g. Shoutcast, BBC Radio 1).
+ SCROBBLE_SOURCE_PERSONALIZED_BROADCAST: Personalised
+ recommendation except Last.fm (e.g. Pandora, Launchcast).
+ SCROBBLE_SOURCE_LASTFM: Last.fm (any mode). In this case, the
+ 5-digit recommendation_key value must be set.
+ SCROBBLE_SOURCE_UNKNOWN: Source unknown.
+ mode: The submission mode
+ SCROBBLE_MODE_PLAYED: The track was played.
+ SCROBBLE_MODE_LOVED: The user manually loved the track
+ (implies a listen)
+ SCROBBLE_MODE_SKIPPED: The track was skipped
+ (Only if source was Last.fm)
+ SCROBBLE_MODE_BANNED: The track was banned
+ (Only if source was Last.fm)
+ duration: Track duration in seconds.
+ album: The album name.
+ track_number: The track number on the album.
+ mbid: MusicBrainz ID.
+ """
+
+ _deprecation_warning(
+ "DeprecationWarning: Use Network.scrobble(...) instead")
+
+ params = {
+ "s": self._get_session_id(),
+ "a[0]": _string(artist),
+ "t[0]": _string(title),
+ "i[0]": str(time_started),
+ "o[0]": source,
+ "r[0]": mode,
+ "l[0]": str(duration),
+ "b[0]": _string(album),
+ "n[0]": track_number,
+ "m[0]": mbid
+ }
+
+ _ScrobblerRequest(self.submissions_url, params, self.network).execute()
+
+ def scrobble_many(self, tracks):
+ """
+ Scrobble several tracks at once.
+
+ tracks: A sequence of a sequence of parameters for each track.
+ The order of parameters is the same as if passed to the
+ scrobble() method.
+ """
+
+ _deprecation_warning(
+ "DeprecationWarning: Use Network.scrobble_many(...) instead")
+
+ remainder = []
+
+ if len(tracks) > 50:
+ remainder = tracks[50:]
+ tracks = tracks[:50]
+
+ params = {"s": self._get_session_id()}
+
+ i = 0
+ for t in tracks:
+ _pad_list(t, 9, "")
+ params["a[%s]" % str(i)] = _string(t[0])
+ params["t[%s]" % str(i)] = _string(t[1])
+ params["i[%s]" % str(i)] = str(t[2])
+ params["o[%s]" % str(i)] = t[3]
+ params["r[%s]" % str(i)] = t[4]
+ params["l[%s]" % str(i)] = str(t[5])
+ params["b[%s]" % str(i)] = _string(t[6])
+ params["n[%s]" % str(i)] = t[7]
+ params["m[%s]" % str(i)] = t[8]
+
+ i += 1
+
+ _ScrobblerRequest(self.submissions_url, params, self.network).execute()
+
+ if remainder:
+ self.scrobble_many(remainder)
+
+# End of file
diff --git a/.install/.kodi/addons/script.module.pylast/lib/pylast/__init__.pyo b/.install/.kodi/addons/script.module.pylast/lib/pylast/__init__.pyo
new file mode 100644
index 000000000..4b87fc96d
Binary files /dev/null and b/.install/.kodi/addons/script.module.pylast/lib/pylast/__init__.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/__init__.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/__init__.pyo
index d59cb6301..a24375070 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/__init__.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/__init__.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/__version__.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/__version__.pyo
index 728c830f0..084919dc5 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/__version__.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/__version__.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/_internal_utils.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/_internal_utils.pyo
index bca901e23..9d7b5a775 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/_internal_utils.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/_internal_utils.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/adapters.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/adapters.pyo
index a0510472b..6a41b7052 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/adapters.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/adapters.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/api.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/api.pyo
index 176c5db89..c9391e137 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/api.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/api.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/auth.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/auth.pyo
index 787c6858c..e3bd6bb05 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/auth.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/auth.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/certs.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/certs.pyo
index ac8872627..2e42d7895 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/certs.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/certs.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/compat.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/compat.pyo
index 561683a85..596d142ab 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/compat.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/compat.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/cookies.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/cookies.pyo
index 00239aa17..39af363a5 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/cookies.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/cookies.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/exceptions.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/exceptions.pyo
index 4fb155c99..61ba8ffd0 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/exceptions.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/exceptions.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/hooks.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/hooks.pyo
index bb79a1400..a56da536e 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/hooks.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/hooks.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/models.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/models.pyo
index 75feba3b1..1d27dfb19 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/models.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/models.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/packages.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/packages.pyo
index 28b3b5b47..a3dfe3fc0 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/packages.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/packages.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/sessions.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/sessions.pyo
index 0bbc2021f..c71d4052a 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/sessions.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/sessions.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/status_codes.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/status_codes.pyo
index cdf0d5394..508a08b1a 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/status_codes.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/status_codes.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/structures.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/structures.pyo
index 1be4f7fe2..c39a57c6d 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/structures.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/structures.pyo differ
diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/utils.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/utils.pyo
index 2e9042b1d..2e984238d 100644
Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/utils.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/utils.pyo differ
diff --git a/.install/.kodi/addons/script.module.simplecache/LICENSE b/.install/.kodi/addons/script.module.simplecache/LICENSE
new file mode 100644
index 000000000..8dada3eda
--- /dev/null
+++ b/.install/.kodi/addons/script.module.simplecache/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/.install/.kodi/addons/script.module.simplecache/README.md b/.install/.kodi/addons/script.module.simplecache/README.md
new file mode 100644
index 000000000..707a8de70
--- /dev/null
+++ b/.install/.kodi/addons/script.module.simplecache/README.md
@@ -0,0 +1,103 @@
+# script.module.simplecache
+
+[![Codacy Badge](https://api.codacy.com/project/badge/Grade/5e223503667f4a35a791d140f2cb6285)](https://www.codacy.com/app/m-vanderveldt/script-module-simplecache?utm_source=github.com&utm_medium=referral&utm_content=marcelveldt/script.module.simplecache&utm_campaign=badger)
+
+A simple object cache for Kodi addons
+
+
+## Help needed with maintaining !
+I am very busy currently so I do not have a lot of time to work on this project or watch the forums.
+Be aware that this is a community driven project, so feel free to submit PR's yourself to improve the code and/or help others with support on the forums etc. If you're willing to really participate in the development, please contact me so I can give you write access to the repo. I do my best to maintain the project every once in a while, when I have some spare time left.
+Thanks for understanding!
+
+
+## Usage
+
+You can use this python library as module within your own Kodi scripts/addons.
+Just make sure to import it within your addon.xml:
+
+```xml
+
+
+
+```
+
+Now, to use it in your Kodi addon/script, make sure to import it and you can access its methods.
+
+```python
+import simplecache
+
+# instantiate the cache
+_cache = simplecache.SimpleCache()
+
+# get data from cache
+mycache = _cache.get("MyAddon.MyChunkOfData")
+if mycache:
+ my_objects = mycache
+else:
+ # do stuff here
+ my_objects = mymethod()
+
+ # write results in cache
+ _cache.set( "MyAddon.MyChunkOfData", my_objects, expiration=datetime.timedelta(hours=12))
+```
+
+The above example will check the cache for the key "MyAddon.MyChunkOfData". If there is any data (and the cache is not expired) it will be returned as the original object.
+
+If the cache is empty, you perform the usual stuff to get the data and save that to the cache
+
+---------------------------------------------------------------------------
+
+## Available methods
+
+### get(endpoint, checksum="", json=False)
+```
+ Returns the data from the cache for the specified endpoint. Will return None if there is no cache.
+
+ parameters:
+ endpoint --> Your unique reference/key for the cache object. TIP: To prevent clashes with other addons, prefix with your addon ID.
+ checksum --> Optional argument to check for a checksum in the file (Will only work if you store the checksum with the set method). Can be any python object which can be serialized with eval.
+ json --> Optional argument. Default is False. For JSON data it is recommended to switch it to True to avoid Memomy Error exceptions or other issues. If you set the global "data_is_json" bool to True, it will always handle your data as JSON.
+
+
+ Example: _cache.get("MyAddon.MyChunkOfData", checksum=len(myvideos))
+
+ This example will return the data in the cache but only if the length of the list myvideos is the same as whatever is stored as checksum in the cache.
+
+```
+
+### set(endpoint, data, checksum="", expiration=timedelta(days=30), json=False)
+```
+ Stores the data in the cache for the specified endpoint.
+
+ parameters:
+ endpoint --> Your unique reference/key for the cache object. TIP: To prevent clashes with other addons, prefix with your addon ID.
+ data --> Your objectdata. Can be any python object which can be serialized with eval.
+ checksum --> Optional argument to store as checksum in the file. Can be any python object which can be serialized with eval.
+ expiration --> Optional argument to specify the amount of time the data may be cached as python timedelta object. Defaults to 30 days if ommitted.
+ json --> Optional argument. Default is False. For JSON data it is recommended to switch it to True to avoid Memomy Error exceptions or other issues. If you set the global "data_is_json" bool to True, it will always handle your data as JSON.
+
+ Example: _cache.set("MyAddon.MyGreatChunkOfData", my_objects, checksum=len(myvideos), expiration=timedelta(hours=1))
+
+ This example will store the data in the cache which will expire after 1 hours. Additionally a checksum is stored in the cache object.
+
+```
+
+## Notes
+
+1) By default objects will be stored both in memory and on disk, it is however possible to override that:
+```
+ _cache.enable_mem_cache = False
+```
+In that case, objects will only be stored on disk (database)
+
+
+2) Cache objects are auto cleaned from memory after 2 hours to prevent unused objects loaded in memory.
+
+
+3) Cache objects on disk are stored in a self-maintaining sqllite database. Expired objects will be auto cleaned from the database.
+
+4) If your data is only JSON you can set a global bool to handle all input/ouput requests as JSON. This is recommended to avoid problems and issues on slower devices.
+```
+ _cache.data_is_json = True
+```
diff --git a/.install/.kodi/addons/script.module.simplecache/addon.xml b/.install/.kodi/addons/script.module.simplecache/addon.xml
new file mode 100644
index 000000000..8ec846451
--- /dev/null
+++ b/.install/.kodi/addons/script.module.simplecache/addon.xml
@@ -0,0 +1,18 @@
+
+
+
+
+
+
+
+ Provides a simple file- and memory based cache for Kodi addons
+ Provides a simple file- and memory based cache for Kodi addons. Based on the original work of Marcelveldt.
+ all
+ Apache 2.0
+
+ https://github.com/sualfred/script.module.simplecache
+
+ resources/icon.png
+
+
+
diff --git a/.install/.kodi/addons/script.module.simplecache/lib/simplecache.py b/.install/.kodi/addons/script.module.simplecache/lib/simplecache.py
new file mode 100644
index 000000000..0f52c553a
--- /dev/null
+++ b/.install/.kodi/addons/script.module.simplecache/lib/simplecache.py
@@ -0,0 +1,309 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+'''provides a simple stateless caching system for Kodi addons and plugins'''
+
+import sys
+import xbmcvfs
+import xbmcgui
+import xbmc
+import xbmcaddon
+import datetime
+import time
+import sqlite3
+import json
+from functools import reduce
+
+ADDON_ID = "script.module.simplecache"
+
+class SimpleCache(object):
+ '''simple stateless caching system for Kodi'''
+ enable_mem_cache = True
+ data_is_json = False
+ global_checksum = None
+ _exit = False
+ _auto_clean_interval = datetime.timedelta(hours=4)
+ _win = None
+ _busy_tasks = []
+ _database = None
+
+ def __init__(self):
+ '''Initialize our caching class'''
+ self._win = xbmcgui.Window(10000)
+ self._monitor = xbmc.Monitor()
+ self.check_cleanup()
+ self._log_msg("Initialized")
+
+ def close(self):
+ '''tell any tasks to stop immediately (as we can be called multithreaded) and cleanup objects'''
+ self._exit = True
+ # wait for all tasks to complete
+ while self._busy_tasks and not self._monitor.abortRequested():
+ xbmc.sleep(25)
+ del self._win
+ del self._monitor
+ self._log_msg("Closed")
+
+ def __del__(self):
+ '''make sure close is called'''
+ if not self._exit:
+ self.close()
+
+ def get(self, endpoint, checksum="", json_data=False):
+ '''
+ get object from cache and return the results
+ endpoint: the (unique) name of the cache object as reference
+ checkum: optional argument to check if the checksum in the cacheobject matches the checkum provided
+ '''
+ checksum = self._get_checksum(checksum)
+ cur_time = self._get_timestamp(datetime.datetime.now())
+ result = None
+ # 1: try memory cache first
+ if self.enable_mem_cache:
+ result = self._get_mem_cache(endpoint, checksum, cur_time, json_data)
+
+ # 2: fallback to _database cache
+ if result is None:
+ result = self._get_db_cache(endpoint, checksum, cur_time, json_data)
+
+ return result
+
+ def set(self, endpoint, data, checksum="", expiration=datetime.timedelta(days=30), json_data=False):
+ '''
+ set data in cache
+ '''
+ task_name = "set.%s" % endpoint
+ self._busy_tasks.append(task_name)
+ checksum = self._get_checksum(checksum)
+ expires = self._get_timestamp(datetime.datetime.now() + expiration)
+
+ # memory cache: write to window property
+ if self.enable_mem_cache and not self._exit:
+ self._set_mem_cache(endpoint, checksum, expires, data, json_data)
+
+ # db cache
+ if not self._exit:
+ self._set_db_cache(endpoint, checksum, expires, data, json_data)
+
+ # remove this task from list
+ self._busy_tasks.remove(task_name)
+
+ def check_cleanup(self):
+ '''check if cleanup is needed - public method, may be called by calling addon'''
+ cur_time = datetime.datetime.now()
+ lastexecuted = self._win.getProperty("simplecache.clean.lastexecuted")
+ if not lastexecuted:
+ self._win.setProperty("simplecache.clean.lastexecuted", repr(cur_time))
+ elif (eval(lastexecuted) + self._auto_clean_interval) < cur_time:
+ # cleanup needed...
+ self._do_cleanup()
+
+ def _get_mem_cache(self, endpoint, checksum, cur_time, json_data):
+ '''
+ get cache data from memory cache
+ we use window properties because we need to be stateless
+ '''
+ result = None
+
+ cachedata = self._win.getProperty(endpoint.encode("utf-8"))
+
+ if cachedata:
+ if json_data or self.data_is_json:
+ cachedata = json.loads(cachedata)
+ else:
+ cachedata = eval(cachedata)
+ if cachedata[0] > cur_time:
+ if not checksum or checksum == cachedata[2]:
+ result = cachedata[1]
+ return result
+
+ def _set_mem_cache(self, endpoint, checksum, expires, data, json_data):
+ '''
+ window property cache as alternative for memory cache
+ usefull for (stateless) plugins
+ '''
+ cachedata = (expires, data, checksum)
+ if json_data or self.data_is_json:
+ cachedata_str = json.dumps(cachedata).encode("utf-8")
+ else:
+ cachedata_str = repr(cachedata).encode("utf-8")
+
+ self._win.setProperty(endpoint.encode("utf-8"), cachedata_str)
+
+ def _get_db_cache(self, endpoint, checksum, cur_time, json_data):
+ '''get cache data from sqllite _database'''
+ result = None
+ query = "SELECT expires, data, checksum FROM simplecache WHERE id = ?"
+ cache_data = self._execute_sql(query, (endpoint,))
+ if cache_data:
+ cache_data = cache_data.fetchone()
+ if cache_data and cache_data[0] > cur_time:
+ if not checksum or cache_data[2] == checksum:
+ if json_data or self.data_is_json:
+ result = json.loads(cache_data[1])
+ else:
+ result = eval(cache_data[1])
+ # also set result in memory cache for further access
+ if self.enable_mem_cache:
+ self._set_mem_cache(endpoint, checksum, cache_data[0], result, json_data)
+ return result
+
+ def _set_db_cache(self, endpoint, checksum, expires, data, json_data):
+ ''' store cache data in _database '''
+ query = "INSERT OR REPLACE INTO simplecache( id, expires, data, checksum) VALUES (?, ?, ?, ?)"
+ if json_data or self.data_is_json:
+ data = json.dumps(data)
+ else:
+ data = repr(data)
+ self._execute_sql(query, (endpoint, expires, data, checksum))
+
+ def _do_cleanup(self):
+ '''perform cleanup task'''
+ if self._exit or self._monitor.abortRequested():
+ return
+ self._busy_tasks.append(__name__)
+ cur_time = datetime.datetime.now()
+ cur_timestamp = self._get_timestamp(cur_time)
+ self._log_msg("Running cleanup...")
+ if self._win.getProperty("simplecachecleanbusy"):
+ return
+ self._win.setProperty("simplecachecleanbusy", "busy")
+
+ query = "SELECT id, expires FROM simplecache"
+ for cache_data in self._execute_sql(query).fetchall():
+ cache_id = cache_data[0]
+ cache_expires = cache_data[1]
+ if self._exit or self._monitor.abortRequested():
+ return
+ # always cleanup all memory objects on each interval
+ self._win.clearProperty(cache_id.encode("utf-8"))
+ # clean up db cache object only if expired
+ if cache_expires < cur_timestamp:
+ query = 'DELETE FROM simplecache WHERE id = ?'
+ self._execute_sql(query, (cache_id,))
+ self._log_msg("delete from db %s" % cache_id)
+
+ # compact db
+ self._execute_sql("VACUUM")
+
+ # remove task from list
+ self._busy_tasks.remove(__name__)
+ self._win.setProperty("simplecache.clean.lastexecuted", repr(cur_time))
+ self._win.clearProperty("simplecachecleanbusy")
+ self._log_msg("Auto cleanup done")
+
+ def _get_database(self):
+ '''get reference to our sqllite _database - performs basic integrity check'''
+ addon = xbmcaddon.Addon(ADDON_ID)
+ dbpath = addon.getAddonInfo('profile')
+ dbfile = xbmc.translatePath("%s/simplecache.db" % dbpath).decode('utf-8')
+
+ if not xbmcvfs.exists(dbpath):
+ xbmcvfs.mkdirs(dbpath)
+ del addon
+ try:
+ connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None)
+ connection.execute('SELECT * FROM simplecache LIMIT 1')
+ return connection
+ except Exception as error:
+ # our _database is corrupt or doesn't exist yet, we simply try to recreate it
+ if xbmcvfs.exists(dbfile):
+ xbmcvfs.delete(dbfile)
+ try:
+ connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None)
+ connection.execute(
+ """CREATE TABLE IF NOT EXISTS simplecache(
+ id TEXT UNIQUE, expires INTEGER, data TEXT, checksum INTEGER)""")
+ return connection
+ except Exception as error:
+ self._log_msg("Exception while initializing _database: %s" % str(error), xbmc.LOGWARNING)
+ self.close()
+ return None
+
+ def _execute_sql(self, query, data=None):
+ '''little wrapper around execute and executemany to just retry a db command if db is locked'''
+ retries = 0
+ result = None
+ error = None
+ # always use new db object because we need to be sure that data is available for other simplecache instances
+ with self._get_database() as _database:
+ while not retries == 10 and not self._monitor.abortRequested():
+ if self._exit:
+ return None
+ try:
+ if isinstance(data, list):
+ result = _database.executemany(query, data)
+ elif data:
+ result = _database.execute(query, data)
+ else:
+ result = _database.execute(query)
+ return result
+ except sqlite3.OperationalError as error:
+ if "_database is locked" in error:
+ self._log_msg("retrying DB commit...")
+ retries += 1
+ self._monitor.waitForAbort(0.5)
+ else:
+ break
+ except Exception as error:
+ break
+ self._log_msg("_database ERROR ! -- %s" % str(error), xbmc.LOGWARNING)
+ return None
+
+ @staticmethod
+ def _log_msg(msg, loglevel=xbmc.LOGDEBUG):
+ '''helper to send a message to the kodi log'''
+ if isinstance(msg, unicode):
+ msg = msg.encode('utf-8')
+
+ xbmc.log("Skin Helper Simplecache --> %s" % msg, level=loglevel)
+
+ @staticmethod
+ def _get_timestamp(date_time):
+ '''Converts a datetime object to unix timestamp'''
+ return int(time.mktime(date_time.timetuple()))
+
+ def _get_checksum(self, stringinput):
+ '''get int checksum from string'''
+ if not stringinput and not self.global_checksum:
+ return 0
+ if self.global_checksum:
+ stringinput = "%s-%s" %(self.global_checksum, stringinput)
+ else:
+ stringinput = str(stringinput)
+ return reduce(lambda x, y: x + y, map(ord, stringinput))
+
+
+def use_cache(cache_days=14):
+ '''
+ wrapper around our simple cache to use as decorator
+ Usage: define an instance of SimpleCache with name "cache" (self.cache) in your class
+ Any method that needs caching just add @use_cache as decorator
+ NOTE: use unnamed arguments for calling the method and named arguments for optional settings
+ '''
+ def decorator(func):
+ '''our decorator'''
+ def decorated(*args, **kwargs):
+ '''process the original method and apply caching of the results'''
+ method_class = args[0]
+ method_class_name = method_class.__class__.__name__
+ cache_str = "%s.%s" % (method_class_name, func.__name__)
+ # cache identifier is based on positional args only
+ # named args are considered optional and ignored
+ for item in args[1:]:
+ cache_str += u".%s" % item
+ cache_str = cache_str.lower()
+ cachedata = method_class.cache.get(cache_str)
+ global_cache_ignore = False
+ try:
+ global_cache_ignore = method_class.ignore_cache
+ except Exception:
+ pass
+ if cachedata is not None and not kwargs.get("ignore_cache", False) and not global_cache_ignore:
+ return cachedata
+ else:
+ result = func(*args, **kwargs)
+ method_class.cache.set(cache_str, result, expiration=datetime.timedelta(days=cache_days))
+ return result
+ return decorated
+ return decorator
diff --git a/.install/.kodi/addons/script.module.simplecache/lib/simplecache.pyo b/.install/.kodi/addons/script.module.simplecache/lib/simplecache.pyo
new file mode 100644
index 000000000..a4db6a417
Binary files /dev/null and b/.install/.kodi/addons/script.module.simplecache/lib/simplecache.pyo differ
diff --git a/.install/.kodi/addons/script.module.simplecache/resources/icon.png b/.install/.kodi/addons/script.module.simplecache/resources/icon.png
new file mode 100644
index 000000000..ca4c53adb
Binary files /dev/null and b/.install/.kodi/addons/script.module.simplecache/resources/icon.png differ
diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/__init__.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/__init__.pyo
index a7890129e..389ba142d 100644
Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/__init__.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/__init__.pyo differ
diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/compat.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/compat.pyo
index 1bbff8706..594aca458 100644
Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/compat.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/compat.pyo differ
diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/decoder.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/decoder.pyo
index 1f45c0473..6aa04bfd8 100644
Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/decoder.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/decoder.pyo differ
diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/encoder.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/encoder.pyo
index a0d8b3bc9..a605c8336 100644
Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/encoder.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/encoder.pyo differ
diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/errors.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/errors.pyo
index 6bac340f3..bbdbd9f38 100644
Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/errors.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/errors.pyo differ
diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/raw_json.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/raw_json.pyo
index f2a2b802c..818a6c8c8 100644
Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/raw_json.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/raw_json.pyo differ
diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/scanner.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/scanner.pyo
index 6011f348f..721ba5778 100644
Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/scanner.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/scanner.pyo differ
diff --git a/.install/.kodi/addons/script.module.six/LICENSE b/.install/.kodi/addons/script.module.six/LICENSE
new file mode 100644
index 000000000..d76e02426
--- /dev/null
+++ b/.install/.kodi/addons/script.module.six/LICENSE
@@ -0,0 +1,18 @@
+Copyright (c) 2010-2014 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/.install/.kodi/addons/script.module.six/README b/.install/.kodi/addons/script.module.six/README
new file mode 100644
index 000000000..4de73fa95
--- /dev/null
+++ b/.install/.kodi/addons/script.module.six/README
@@ -0,0 +1,16 @@
+Six is a Python 2 and 3 compatibility library. It provides utility functions
+for smoothing over the differences between the Python versions with the goal of
+writing Python code that is compatible on both Python versions. See the
+documentation for more information on what is provided.
+
+Six supports every Python version since 2.5. It is contained in only one Python
+file, so it can be easily copied into your project. (The copyright and license
+notice must be retained.)
+
+Online documentation is at http://pythonhosted.org/six/.
+
+Bugs can be reported to http://bitbucket.org/gutworth/six. The code can also be
+found there.
+
+For questions about six or porting in general, email the python-porting mailing
+list: http://mail.python.org/mailman/listinfo/python-porting
diff --git a/.install/.kodi/addons/script.module.six/addon.xml b/.install/.kodi/addons/script.module.six/addon.xml
new file mode 100644
index 000000000..0a1f6b28c
--- /dev/null
+++ b/.install/.kodi/addons/script.module.six/addon.xml
@@ -0,0 +1,20 @@
+
+
+
+
+
+
+
+ Python 2 and 3 compatibility utilities.
+ Six is a Python 2 and 3 compatibility library. It provides utility functions for smoothing over the differences between the Python versions with the goal of writing Python code that is compatible on both Python versions. See the documentation for more information on what is provided.
+ all
+ MIT
+ https://pypi.org/project/six/
+ https://pypi.org/project/six/
+
+
diff --git a/.install/.kodi/addons/script.module.six/icon.png b/.install/.kodi/addons/script.module.six/icon.png
new file mode 100644
index 000000000..ca4c53adb
Binary files /dev/null and b/.install/.kodi/addons/script.module.six/icon.png differ
diff --git a/.install/.kodi/addons/script.module.six/lib/six.py b/.install/.kodi/addons/script.module.six/lib/six.py
new file mode 100644
index 000000000..357e624ab
--- /dev/null
+++ b/.install/.kodi/addons/script.module.six/lib/six.py
@@ -0,0 +1,963 @@
+# Copyright (c) 2010-2019 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson "
+__version__ = "1.13.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP302 finder and loader. It should be compatible
+ with Python 2.5 and all existing versions of Python3
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true, if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+ MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+ unichr = chr
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ del io
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_("""def reraise(tp, value, tb=None):
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
+""")
+
+
+if sys.version_info[:2] == (3, 2):
+ exec_("""def raise_from(value, from_value):
+ try:
+ if from_value is None:
+ raise value
+ raise value from from_value
+ finally:
+ value = None
+""")
+elif sys.version_info[:2] > (3, 2):
+ exec_("""def raise_from(value, from_value):
+ try:
+ raise value from from_value
+ finally:
+ value = None
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ def wrapper(f):
+ f = functools.wraps(wrapped, assigned, updated)(f)
+ f.__wrapped__ = wrapped
+ return f
+ return wrapper
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(type):
+
+ def __new__(cls, name, this_bases, d):
+ if sys.version_info[:2] >= (3, 7):
+ # This version introduced PEP 560 that requires a bit
+ # of extra care (we mimic what is done by __build_class__).
+ resolved_bases = types.resolve_bases(bases)
+ if resolved_bases is not bases:
+ d['__orig_bases__'] = bases
+ else:
+ resolved_bases = bases
+ return meta(name, resolved_bases, d)
+
+ @classmethod
+ def __prepare__(cls, name, this_bases):
+ return meta.__prepare__(name, bases)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ if hasattr(cls, '__qualname__'):
+ orig_vars['__qualname__'] = cls.__qualname__
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
+
+
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif isinstance(s, binary_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ if PY2 and isinstance(s, text_type):
+ s = s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ s = s.decode(encoding, errors)
+ return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/.install/.kodi/addons/script.module.six/lib/six.pyo b/.install/.kodi/addons/script.module.six/lib/six.pyo
new file mode 100644
index 000000000..dbab29d1e
Binary files /dev/null and b/.install/.kodi/addons/script.module.six/lib/six.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/__init__.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/__init__.pyo
index e92d67bc8..94abd98b4 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/__init__.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/__init__.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/_collections.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/_collections.pyo
index 58d777ebb..ad19660e8 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/_collections.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/_collections.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connection.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connection.pyo
index af3d98687..e43d14fd7 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connection.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connection.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connectionpool.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connectionpool.pyo
index 47e025cee..e563b5f36 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connectionpool.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connectionpool.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/__init__.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/__init__.pyo
index e93ba009d..3fcd56e76 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/__init__.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/__init__.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/_appengine_environ.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/_appengine_environ.pyo
index a4ae3b65f..f55ddb6fd 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/_appengine_environ.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/_appengine_environ.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/pyopenssl.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/pyopenssl.pyo
index 733cbb6fa..98b67d78d 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/pyopenssl.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/pyopenssl.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/socks.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/socks.pyo
index 91665b44d..04afcffae 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/socks.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/socks.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/exceptions.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/exceptions.pyo
index 26d55490a..32176138e 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/exceptions.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/exceptions.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/fields.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/fields.pyo
index 487f69552..f5750c56f 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/fields.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/fields.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/filepost.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/filepost.pyo
index fa5ff6757..ea3cf7203 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/filepost.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/filepost.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/__init__.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/__init__.pyo
index 803341dfa..b8fcd28f0 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/__init__.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/__init__.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/six.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/six.pyo
index d1c33f13a..5a846fb80 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/six.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/six.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/__init__.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/__init__.pyo
index da84627a6..911ac7393 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/__init__.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/__init__.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/_implementation.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/_implementation.pyo
index d49a0278e..95e3cdcd4 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/_implementation.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/_implementation.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/poolmanager.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/poolmanager.pyo
index 10c44bd92..d1bc8fa4d 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/poolmanager.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/poolmanager.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/request.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/request.pyo
index 0a7dbbb0d..8811f7159 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/request.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/request.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/response.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/response.pyo
index b4418f796..def744c6d 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/response.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/response.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/__init__.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/__init__.pyo
index b624dcde5..2108e8c80 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/__init__.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/__init__.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/connection.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/connection.pyo
index e4a4d7dab..16435e88e 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/connection.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/connection.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/queue.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/queue.pyo
index 77b188ba1..9c4b06d73 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/queue.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/queue.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/request.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/request.pyo
index 3a024d562..c1fd88a72 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/request.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/request.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/response.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/response.pyo
index e4bf4f53a..b1fb4cc24 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/response.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/response.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/retry.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/retry.pyo
index aed45dd27..939983fc0 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/retry.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/retry.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/ssl_.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/ssl_.pyo
index 8835e2654..96fe505ea 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/ssl_.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/ssl_.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/timeout.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/timeout.pyo
index aece5b09b..d4619d363 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/timeout.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/timeout.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/url.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/url.pyo
index 18521ae15..3196837c4 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/url.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/url.pyo differ
diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/wait.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/wait.pyo
index 8d1f9650f..fc4de4109 100644
Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/wait.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/wait.pyo differ
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/addon.xml b/.install/.kodi/addons/script.module.xbmcswift2/addon.xml
index 12e14a2c0..e66e3d59c 100644
--- a/.install/.kodi/addons/script.module.xbmcswift2/addon.xml
+++ b/.install/.kodi/addons/script.module.xbmcswift2/addon.xml
@@ -1,4 +1,4 @@
-
+
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/changelog.txt b/.install/.kodi/addons/script.module.xbmcswift2/changelog.txt
index 9fa934979..d11c2cb13 100644
--- a/.install/.kodi/addons/script.module.xbmcswift2/changelog.txt
+++ b/.install/.kodi/addons/script.module.xbmcswift2/changelog.txt
@@ -1,5 +1,8 @@
CHANGES
=======
+Version 13.0.3 (15/12/2020)
+- Remove offscreen support (only supported in leia++)
+
Version 13.0.0 (12/4/2020)
- Fix args
- py2 only version
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/__init__.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/__init__.pyo
new file mode 100644
index 000000000..1c8a3839e
Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/__init__.pyo differ
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/common.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/common.pyo
new file mode 100644
index 000000000..6d9bda90a
Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/common.pyo differ
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/constants.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/constants.pyo
new file mode 100644
index 000000000..762f1f06c
Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/constants.pyo differ
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.py b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.py
index c7c8ad898..6ffe5291c 100644
--- a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.py
+++ b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.py
@@ -17,15 +17,14 @@ class ListItem(object):
of any set properties that xbmcgui doesn't expose getters for.
'''
def __init__(self, label=None, label2=None, icon=None, thumbnail=None,
- path=None, fanart=None, offscreen=False):
+ path=None, fanart=None):
'''Defaults are an emtpy string since xbmcgui.ListItem will not
accept None.
'''
kwargs = {
'label': label,
'label2': label2,
- 'path': path,
- 'offscreen': offscreen
+ 'path': path
}
#kwargs = dict((key, val) for key, val in locals().items() if val is
#not None and key != 'self')
@@ -194,14 +193,13 @@ class ListItem(object):
def from_dict(cls, label=None, label2=None, icon=None, thumbnail=None,
path=None, selected=None, info=None, properties=None,
context_menu=None, replace_context_menu=False,
- is_playable=None, info_type='video', stream_info=None, fanart=None,
- offscreen=False):
+ is_playable=None, info_type='video', stream_info=None, fanart=None):
'''A ListItem constructor for setting a lot of properties not
available in the regular __init__ method. Useful to collect all
the properties in a dict and then use the **dct to call this
method.
'''
- listitem = cls(label, label2, icon, thumbnail, path, fanart, offscreen)
+ listitem = cls(label, label2, icon, thumbnail, path, fanart)
if selected is not None:
listitem.select(selected)
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.pyo
new file mode 100644
index 000000000..2c599e83a
Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.pyo differ
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/logger.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/logger.pyo
new file mode 100644
index 000000000..082e6bdf4
Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/logger.pyo differ
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/module.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/module.pyo
new file mode 100644
index 000000000..a9993a977
Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/module.pyo differ
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/plugin.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/plugin.pyo
new file mode 100644
index 000000000..5ec3d7fe5
Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/plugin.pyo differ
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/request.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/request.pyo
new file mode 100644
index 000000000..89a13380c
Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/request.pyo differ
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/storage.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/storage.pyo
new file mode 100644
index 000000000..2e242ac15
Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/storage.pyo differ
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/urls.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/urls.pyo
new file mode 100644
index 000000000..9b440cb57
Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/urls.pyo differ
diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/xbmcmixin.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/xbmcmixin.pyo
new file mode 100644
index 000000000..4552ea6c0
Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/xbmcmixin.pyo differ
diff --git a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/__init__.pyo b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/__init__.pyo
index b3a6f562a..781ab747b 100644
Binary files a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/__init__.pyo and b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/__init__.pyo differ
diff --git a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/common.pyo b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/common.pyo
index 54d59c7e4..22df0f7fb 100644
Binary files a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/common.pyo and b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/common.pyo differ
diff --git a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/json_interface.pyo b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/json_interface.pyo
index b135f059f..b650d520f 100644
Binary files a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/json_interface.pyo and b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/json_interface.pyo differ
diff --git a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/service.pyo b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/service.pyo
index 0aeaf75d6..a4b1437e1 100644
Binary files a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/service.pyo and b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/service.pyo differ
diff --git a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/versions.pyo b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/versions.pyo
index 98527e32e..dccd111c0 100644
Binary files a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/versions.pyo and b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/versions.pyo differ
diff --git a/.install/.kodi/temp/kodi.log b/.install/.kodi/temp/kodi.log
deleted file mode 100644
index 9ece62d6d..000000000
--- a/.install/.kodi/temp/kodi.log
+++ /dev/null
@@ -1,341 +0,0 @@
-2020-12-13 14:19:18.522 T:2984189216 NOTICE: -----------------------------------------------------------------------
-2020-12-13 14:19:18.522 T:2984189216 NOTICE: Starting Kodi (18.9 (18.9.0) Git:20201027-6d9d93e-dirty). Platform: Linux ARM 32-bit
-2020-12-13 14:19:18.522 T:2984189216 NOTICE: Using Debug Kodi x32 build
-2020-12-13 14:19:18.522 T:2984189216 NOTICE: Kodi compiled 2020-11-18 by GCC 8.3.0 for Linux ARM 32-bit version 5.4.38 (328742)
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: Running on XBian 1.0 (knockout), kernel: Linux ARM 32-bit version 5.4.75+
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: FFmpeg version/source: 4.0.4-Kodi
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: Host CPU: ARMv7 Processor rev 3 (v7l), 4 cores available
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: ARM Features: Neon enabled
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://xbmc/ is mapped to: /usr/local/share/kodi
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://xbmcbin/ is mapped to: /usr/local/lib/kodi
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://xbmcbinaddons/ is mapped to: /usr/local/lib/kodi/addons
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://masterprofile/ is mapped to: /home/xbian/.kodi/userdata
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://envhome/ is mapped to: /home/xbian
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://home/ is mapped to: /home/xbian/.kodi
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://temp/ is mapped to: /home/xbian/.kodi/temp
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://logpath/ is mapped to: /home/xbian/.kodi/temp
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: The executable running is: /usr/local/lib/kodi/kodi-gbm
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: Local hostname: cuvelima
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: Log File is located: /home/xbian/.kodi/temp/kodi.log
-2020-12-13 14:19:18.523 T:2984189216 NOTICE: -----------------------------------------------------------------------
-2020-12-13 14:19:18.524 T:2984189216 INFO: loading settings
-2020-12-13 14:19:18.526 T:2984189216 NOTICE: special://profile/ is mapped to: special://masterprofile/
-2020-12-13 14:19:18.549 T:2984189216 DEBUG: CSkinSettings: no tag found
-2020-12-13 14:19:18.549 T:2984189216 NOTICE: No settings file to load (special://xbmc/system/advancedsettings.xml)
-2020-12-13 14:19:18.549 T:2984189216 NOTICE: No settings file to load (special://masterprofile/advancedsettings.xml)
-2020-12-13 14:19:18.549 T:2984189216 NOTICE: Default Video Player: VideoPlayer
-2020-12-13 14:19:18.549 T:2984189216 NOTICE: Default Audio Player: paplayer
-2020-12-13 14:19:18.549 T:2984189216 NOTICE: Disabled debug logging due to GUI setting. Level 0.
-2020-12-13 14:19:18.549 T:2984189216 NOTICE: Log level changed to "LOG_LEVEL_NORMAL"
-2020-12-13 14:19:18.550 T:2984189216 NOTICE: CMediaSourceSettings: loading media sources from special://masterprofile/sources.xml
-2020-12-13 14:19:18.679 T:2984189216 NOTICE: PulseAudio: Server not running
-2020-12-13 14:19:18.714 T:2984189216 NOTICE: Running database version Addons27
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.2sf v2.0.3 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.asap v2.0.2 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.dumb v2.0.2 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.fluidsynth v2.1.1 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.gme v2.0.3 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.gsf v2.0.3 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.modplug v2.0.3 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.ncsf v2.0.3 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.nosefart v2.0.2 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.openmpt v2.0.4 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.organya v1.2.1 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.qsf v2.0.2 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.sidplay v1.2.2 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.snesapu v2.0.2 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.ssf v2.0.2 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.stsound v2.0.2 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.timidity v2.0.5 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.upse v2.0.2 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.usf v2.0.2 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.vgmstream v1.1.5 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.wsr v2.0.2 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audioencoder.flac v2.0.6 installed
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audioencoder.kodi.builtin.aac v1.0.0 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: audioencoder.kodi.builtin.wma v1.0.0 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: audioencoder.lame v2.0.4 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: audioencoder.vorbis v2.0.4 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: audioencoder.wav v2.0.3 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: game.controller.default v1.0.8 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: game.controller.snes v1.0.8 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: imagedecoder.heif v1.1.0 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: imagedecoder.mpo v1.1.2 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: imagedecoder.raw v2.1.2 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: inputstream.adaptive v2.4.6 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: inputstream.rtmp v2.0.9 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.global.audioengine v1.0.1 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.global.filesystem v1.0.2 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.global.general v1.0.3 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.global.gui v5.12.0 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.global.main v1.0.14 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.global.network v1.0.0 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.audiodecoder v2.0.0 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.audioencoder v2.0.0 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.game v1.1.0 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.imagedecoder v2.0.0 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.inputstream v2.0.8 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.peripheral v1.3.7 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.pvr v5.10.3 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.screensaver v2.0.0 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.vfs v2.0.0 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.videocodec v1.0.1 installed
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.visualization v2.0.1 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: kodi.resource v1.0.0 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.album.universal v3.1.3 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.artists.universal v4.3.3 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.common.allmusic.com v3.2.2 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.common.fanart.tv v3.6.3 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.common.imdb.com v3.1.6 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.common.musicbrainz.org v2.2.4 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.common.theaudiodb.com v2.0.3 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.common.themoviedb.org v3.2.12 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.local v1.0.0 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.themoviedb.org v5.2.5 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.tvshows.themoviedb.org v3.5.11 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: peripheral.joystick v1.4.9 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: peripheral.xarcade v1.1.0 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: plugin.video.arteplussept v1.0.2 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: plugin.video.francetv v2.0.0 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: plugin.video.vstream v0.8.3 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: plugin.xbianconfig v18.0.1 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.argustv v3.5.6 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.dvblink v4.7.3 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.dvbviewer v3.7.13 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.filmon v2.4.6 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.hdhomerun v3.5.1 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.hts v4.4.21 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.iptvarchive v3.7.2 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.iptvsimple v3.9.8 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.mediaportal.tvserver v3.5.19 installed
-2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.mythtv v5.10.19 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.nextpvr v3.3.21 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.njoy v3.4.3 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.octonet v0.7.1 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.pctv v2.4.7 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.stalker v3.4.10 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.teleboy v18.2.3 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.vbox v4.7.0 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.vdr.vnsi v3.6.4 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.vuplus v3.28.9 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.wmc v2.4.6 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.zattoo v18.1.21 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: repository.superrepo.org.gotham.all v0.5.206 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: repository.vstream v0.0.4 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: repository.xbmc.org v3.1.6 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: resource.images.weathericons.default v1.1.8 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: resource.language.en_gb v2.0.1 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: resource.language.fr_fr v9.0.24 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: resource.uisounds.kodi v1.0.0 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: screensaver.shadertoy v2.0.0 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: screensaver.xbmc.builtin.black v1.0.33 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: screensaver.xbmc.builtin.dim v1.0.59 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.beautifulsoup4 v4.6.2 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.certifi v2019.9.11 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.chardet v3.0.4 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.idna v2.8 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.inputstreamhelper v0.5.1 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.pil v1.1.7 installed
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.pycryptodome v3.4.3 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: script.module.requests v2.22.0 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: script.module.simplejson v3.16.1 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: script.module.urllib3 v1.25.6 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: script.module.xbmcswift2 v13.0.2 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: script.service.xbian.upstart-bridge v2.0.2 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: service.xbmc.versioncheck v0.5.12 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: skin.estouchy v2.0.28 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: skin.estuary v2.0.27 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: vfs.libarchive v1.0.7 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: vfs.rar v2.3.2 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: vfs.sacd v1.0.4 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: vfs.sftp v1.0.6 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: visualization.shadertoy v1.2.4 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: visualization.spectrum v3.0.4 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: visualization.waveform v3.1.2 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: webinterface.default v18.x-2.4.6 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.addon v18.9 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.core v0.1.0 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.gui v5.14.0 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.json v10.3.0 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.metadata v2.1.0 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.python v2.26.0 installed
-2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.webinterface v1.0.0 installed
-2020-12-13 14:19:19.221 T:2984189216 ERROR: DBus error: org.freedesktop.DBus.Error.InvalidArgs - No such property “CanSuspend”
-2020-12-13 14:19:19.222 T:2984189216 ERROR: DBus error: org.freedesktop.DBus.Error.InvalidArgs - No such property “CanHibernate”
-2020-12-13 14:19:19.248 T:2952786128 NOTICE: Found 2 Lists of Devices
-2020-12-13 14:19:19.248 T:2952786128 NOTICE: Enumerated ALSA devices:
-2020-12-13 14:19:19.248 T:2952786128 NOTICE: Device 1
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_deviceName : default
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_displayName : Default (bcm2835 HDMI 1 bcm2835 HDMI 1)
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_displayNameExtra:
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_channels : FL, FR, BL, BR, FC, LFE, SL, SR
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,32000,44100,48000,64000,88200,96000,176400,192000
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_dataFormats : AE_FMT_S16NE,AE_FMT_S16LE,AE_FMT_U8
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: Device 2
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_deviceName : sysdefault:CARD=b1
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_displayName : bcm2835 HDMI 1
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_displayNameExtra: bcm2835 HDMI 1
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_channels : FL, FR, BL, BR, FC, LFE, SL, SR
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,32000,44100,48000,64000,88200,96000,176400,192000
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_dataFormats : AE_FMT_S16NE,AE_FMT_S16LE,AE_FMT_U8
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: Device 3
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_deviceName : sysdefault:CARD=Headphones
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_displayName : bcm2835 Headphones
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_displayNameExtra: bcm2835 Headphones
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_channels : FL, FR, BL, BR, FC, LFE, SL, SR
-2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,32000,44100,48000,64000,88200,96000,176400,192000
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_dataFormats : AE_FMT_S16NE,AE_FMT_S16LE,AE_FMT_U8
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: Enumerated PI devices:
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: Device 1
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_deviceName : HDMI
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_displayName : HDMI
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_displayNameExtra:
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_HDMI
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_channels : FL, FR
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,24000,32000,44100,48000,88200,96000,176400,192000
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_dataFormats : AE_FMT_FLOAT,AE_FMT_S32NE,AE_FMT_S16NE,AE_FMT_S32LE,AE_FMT_S16LE,AE_FMT_FLOATP,AE_FMT_S32NEP,AE_FMT_S16NEP,AE_FMT_RAW
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_streamTypes : STREAM_TYPE_AC3,STREAM_TYPE_EAC3,STREAM_TYPE_DTSHD_CORE,STREAM_TYPE_DTS_2048,STREAM_TYPE_DTS_1024,STREAM_TYPE_DTS_512
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: Device 2
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_deviceName : Analogue
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_displayName : Analogue
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_displayNameExtra:
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_channels : FL, FR
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_sampleRates : 48000
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_dataFormats : AE_FMT_FLOAT,AE_FMT_S32LE,AE_FMT_S16LE,AE_FMT_FLOATP,AE_FMT_S32NEP,AE_FMT_S16NEP
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: Device 3
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_deviceName : Both
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_displayName : HDMI and Analogue
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_displayNameExtra:
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM
-2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_channels : FL, FR
-2020-12-13 14:19:19.251 T:2952786128 NOTICE: m_sampleRates : 48000
-2020-12-13 14:19:19.251 T:2952786128 NOTICE: m_dataFormats : AE_FMT_FLOAT,AE_FMT_S32LE,AE_FMT_S16LE,AE_FMT_FLOATP,AE_FMT_S32NEP,AE_FMT_S16NEP
-2020-12-13 14:19:19.251 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities
-2020-12-13 14:19:19.524 T:2984189216 NOTICE: Raspberry PI firmware version: Jul 13 2020 13:56:29
- Copyright (c) 2012 Broadcom
- version adcebbdb7b415c623931e80795ba3bae68dcc4fa (clean) (release) (start_x)
-2020-12-13 14:19:19.524 T:2984189216 NOTICE: ARM mem: 704MB GPU mem: 320MB MPG2:0 WVC1:0
-2020-12-13 14:19:19.524 T:2984189216 NOTICE: cache.memorysize: 20MB libass.cache: 0MB
-2020-12-13 14:19:19.524 T:2984189216 NOTICE: Config:
- arm_freq=1500
- audio_pwm_mode=514
- config_hdmi_boost=5
- core_freq=500
- core_freq_min=200
- disable_commandline_tags=2
- disable_l2cache=1
- disable_overscan=1
- disable_splash=1
- display_hdmi_rotate=-1
- display_lcd_rotate=-1
- enable_gic=1
- force_eeprom_read=1
- force_pwm_open=1
- framebuffer_ignore_alpha=1
- framebuffer_swap=1
- gpu_freq=500
- gpu_freq_min=250
- init_uart_clock=0x2dc6c00
- initial_turbo=3
- lcd_framerate=60
- mask_gpu_interrupt0=1024
- mask_gpu_interrupt1=0x10000
- max_framebuffers=2
- over_voltage_avs=-20000
- pause_burst_frames=1
- program_serial_random=1
- total_mem=8192
- hdmi_force_cec_address:0=65535
- hdmi_force_cec_address:1=65535
- hdmi_ignore_cec_init:0=1
- hdmi_pixel_freq_limit:0=0x11e1a300
- hdmi_pixel_freq_limit:1=0x11e1a300
-2020-12-13 14:19:19.524 T:2984189216 NOTICE: Config:
- decode_MPG2=0x00000000
- decode_WVC1=0x00000000
- device_tree=-
- overlay_prefix=overlays/
- hdmi_cvt:0=
- hdmi_cvt:1=
- hdmi_edid_filename:0=
- hdmi_edid_filename:1=
- hdmi_timings:0=
- hdmi_timings:1=
-2020-12-13 14:19:20.344 T:2984189216 WARNING: CDRMUtils::FindPlane - could not find plane
-2020-12-13 14:19:20.345 T:2984189216 WARNING: CDRMUtils::InitDrm - failed to set drm master, will try to authorize instead: Permission denied
-2020-12-13 14:19:20.345 T:2984189216 NOTICE: CDRMUtils::InitDrm - successfully authorized drm magic
-2020-12-13 14:19:22.355 T:2984189216 NOTICE: Found resolution 1920x1080 with 1920x1080 @ 60.000000 Hz
-2020-12-13 14:19:22.355 T:2984189216 NOTICE: Found resolution 1920x1080 with 1920x1080 @ 59.940063 Hz
-2020-12-13 14:19:22.355 T:2984189216 NOTICE: Found resolution 1920x1080 with 1920x1080i @ 60.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1920x1080 with 1920x1080i @ 59.940063 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1920x1080 with 1920x1080 @ 50.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1920x1080 with 1920x1080i @ 50.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1280x768 with 1280x768 @ 60.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1280x720 with 1280x720 @ 60.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1280x720 with 1280x720 @ 59.940063 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1280x720 with 1280x720 @ 50.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1024x768 with 1024x768 @ 75.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1024x768 with 1024x768 @ 70.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1024x768 with 1024x768 @ 60.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 800x600 with 800x600 @ 75.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 800x600 with 800x600 @ 72.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 800x600 with 800x600 @ 60.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 800x600 with 800x600 @ 56.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 720x576 with 720x576 @ 50.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 720x576 with 720x576i @ 50.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 720x480 with 720x480 @ 59.940063 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 720x480 with 720x480 @ 60.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 720x480 with 720x480i @ 59.940063 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 720x480 with 720x480i @ 60.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 640x480 with 640x480 @ 75.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 640x480 with 640x480 @ 73.000000 Hz
-2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 640x480 with 640x480 @ 60.000000 Hz
-2020-12-13 14:19:22.357 T:2984189216 NOTICE: Previous line repeats 1 times.
-2020-12-13 14:19:22.357 T:2984189216 NOTICE: Found resolution 720x400 with 720x400 @ 70.000000 Hz
-2020-12-13 14:19:22.373 T:2984189216 NOTICE: EGL_VERSION = 1.4
-2020-12-13 14:19:22.373 T:2984189216 NOTICE: EGL_VENDOR = Mesa Project
-2020-12-13 14:19:22.373 T:2984189216 NOTICE: EGL_EXTENSIONS = EGL_EXT_buffer_age EGL_EXT_image_dma_buf_import EGL_EXT_image_dma_buf_import_modifiers EGL_KHR_cl_event2 EGL_KHR_config_attribs EGL_KHR_create_context EGL_KHR_create_context_no_error EGL_KHR_fence_sync EGL_KHR_get_all_proc_addresses EGL_KHR_gl_colorspace EGL_KHR_gl_renderbuffer_image EGL_KHR_gl_texture_2D_image EGL_KHR_gl_texture_3D_image EGL_KHR_gl_texture_cubemap_image EGL_KHR_image EGL_KHR_image_base EGL_KHR_image_pixmap EGL_KHR_no_config_context EGL_KHR_reusable_sync EGL_KHR_surfaceless_context EGL_EXT_pixel_format_float EGL_KHR_wait_sync EGL_MESA_configless_context EGL_MESA_drm_image EGL_MESA_image_dma_buf_export EGL_WL_bind_wayland_display
-2020-12-13 14:19:22.373 T:2984189216 NOTICE: EGL_CLIENT_EXTENSIONS = EGL_EXT_device_base EGL_EXT_device_enumeration EGL_EXT_device_query EGL_EXT_platform_base EGL_KHR_client_get_all_proc_addresses EGL_EXT_client_extensions EGL_KHR_debug EGL_EXT_platform_wayland EGL_EXT_platform_x11 EGL_MESA_platform_gbm EGL_MESA_platform_surfaceless
-2020-12-13 14:19:22.377 T:2984189216 NOTICE: Checking resolution 16
-2020-12-13 14:19:22.377 T:2984189216 WARNING: CGBMUtils::DestroySurface - surface already destroyed
-2020-12-13 14:19:22.422 T:2984189216 NOTICE: GL_VENDOR = Broadcom
-2020-12-13 14:19:22.422 T:2984189216 NOTICE: GL_RENDERER = V3D 4.2
-2020-12-13 14:19:22.422 T:2984189216 NOTICE: GL_VERSION = OpenGL ES 3.0 Mesa 19.1.7 (git-b9d7244035)
-2020-12-13 14:19:22.422 T:2984189216 NOTICE: GL_SHADING_LANGUAGE_VERSION = OpenGL ES GLSL ES 3.00
-2020-12-13 14:19:22.422 T:2984189216 NOTICE: GL_EXTENSIONS = GL_EXT_blend_minmax GL_EXT_multi_draw_arrays GL_EXT_texture_format_BGRA8888 GL_OES_compressed_ETC1_RGB8_texture GL_OES_depth24 GL_OES_element_index_uint GL_OES_fbo_render_mipmap GL_OES_mapbuffer GL_OES_rgb8_rgba8 GL_OES_standard_derivatives GL_OES_stencil8 GL_OES_texture_3D GL_OES_texture_float GL_OES_texture_half_float GL_OES_texture_half_float_linear GL_OES_texture_npot GL_OES_vertex_half_float GL_EXT_texture_sRGB_decode GL_OES_EGL_image GL_OES_depth_texture GL_OES_packed_depth_stencil GL_EXT_texture_type_2_10_10_10_REV GL_OES_get_program_binary GL_APPLE_texture_max_level GL_EXT_discard_framebuffer GL_EXT_read_format_bgra GL_EXT_frag_depth GL_NV_fbo_color_attachments GL_OES_EGL_image_external GL_OES_EGL_sync GL_OES_vertex_array_object GL_EXT_occlusion_query_boolean GL_EXT_texture_rg GL_EXT_unpack_subimage GL_NV_draw_buffers GL_NV_read_buffer GL_NV_read_depth GL_NV_read_depth_stencil GL_NV_read_stencil GL_EXT_draw_buffers GL_EXT_map_buffer_range GL_KHR_debug GL_KHR_texture_compression_astc_ldr GL_OES_depth_texture_cube_map GL_OES_required_internalformat GL_OES_surfaceless_context GL_EXT_color_buffer_float GL_EXT_sRGB_write_control GL_EXT_separate_shader_objects GL_EXT_shader_integer_mix GL_EXT_base_instance GL_EXT_compressed_ETC1_RGB8_sub_texture GL_EXT_draw_elements_base_vertex GL_EXT_texture_border_clamp GL_KHR_context_flush_control GL_OES_draw_elements_base_vertex GL_OES_texture_border_clamp GL_OES_texture_stencil8 GL_EXT_float_blend GL_KHR_no_error GL_KHR_texture_compression_astc_sliced_3d GL_OES_EGL_image_external_essl3 GL_MESA_shader_integer_functions GL_KHR_parallel_shader_compile GL_EXT_texture_query_lod
-2020-12-13 14:19:23.375 T:2984189216 WARNING: Repository has MD5 hashes enabled - this hash function is broken and will only guard against unintentional data corruption
-2020-12-13 14:19:23.375 T:2984189216 WARNING: Repository add-on repository.superrepo.org.gotham.all uses plain HTTP for add-on downloads in path http://redirect.superrepo.org/v5/addons/ - this is insecure and will make your Kodi installation vulnerable to attacks if enabled!
-2020-12-13 14:19:23.583 T:2802266320 NOTICE: Running database version Addons27
-2020-12-13 14:19:23.586 T:2802266320 NOTICE: Running database version ViewModes6
-2020-12-13 14:19:23.587 T:2802266320 NOTICE: Running database version Textures13
-2020-12-13 14:19:23.594 T:2802266320 NOTICE: Running database version MyMusic72
-2020-12-13 14:19:23.604 T:2802266320 NOTICE: Running database version MyVideos116
-2020-12-13 14:19:23.607 T:2802266320 NOTICE: Running database version TV32
-2020-12-13 14:19:23.609 T:2802266320 NOTICE: Running database version Epg12
-2020-12-13 14:19:23.624 T:2984189216 NOTICE: start dvd mediatype detection
-2020-12-13 14:19:23.679 T:2984189216 NOTICE: load skin from: /usr/local/share/kodi/addons/skin.estuary (version: 2.0.27)
-2020-12-13 14:19:24.147 T:2802266320 WARNING: Repository has MD5 hashes enabled - this hash function is broken and will only guard against unintentional data corruption
-2020-12-13 14:19:24.147 T:2802266320 WARNING: Repository add-on repository.superrepo.org.gotham.all uses plain HTTP for add-on downloads in path http://redirect.superrepo.org/v5/addons/ - this is insecure and will make your Kodi installation vulnerable to attacks if enabled!
-2020-12-13 14:19:24.153 T:2984189216 WARNING: JSONRPC: Could not parse type "Setting.Details.SettingList"
-2020-12-13 14:19:24.225 T:2984189216 NOTICE: Register - new keyboard device registered on application->keyboard: Clavier (0000:0000)
-2020-12-13 14:19:24.225 T:2984189216 NOTICE: Register - new mouse device registered on application->mouse: Souris (0000:0000)
-2020-12-13 14:19:24.229 T:2984189216 NOTICE: Loading player core factory settings from special://xbmc/system/playercorefactory.xml.
-2020-12-13 14:19:24.230 T:2984189216 NOTICE: Loaded playercorefactory configuration
-2020-12-13 14:19:24.230 T:2984189216 NOTICE: Loading player core factory settings from special://masterprofile/playercorefactory.xml.
-2020-12-13 14:19:24.230 T:2984189216 NOTICE: special://masterprofile/playercorefactory.xml does not exist. Skipping.
-2020-12-13 14:19:24.259 T:2984189216 NOTICE: initialize done
-2020-12-13 14:19:24.259 T:2984189216 NOTICE: XBian: notifying Upstart that I'm well
-2020-12-13 14:19:24.429 T:2984189216 NOTICE: Running the application...
-2020-12-13 14:19:24.435 T:2984189216 NOTICE: starting zeroconf publishing
-2020-12-13 14:19:24.436 T:2984189216 NOTICE: CWebServer[8080]: Started
-2020-12-13 14:19:24.440 T:2655285456 NOTICE: ES: Starting UDP Event server on port 9777
-2020-12-13 14:19:24.440 T:2655285456 NOTICE: UDP: Listening on port 9777 (ipv6 : false)
-2020-12-13 14:19:24.618 T:2749178064 NOTICE: Register - new cec device registered on cec->RPI: CEC Adapter (2708:1001)
diff --git a/.install/.kodi/temp/kodi.old.log b/.install/.kodi/temp/kodi.old.log
deleted file mode 100644
index 7a2d96470..000000000
--- a/.install/.kodi/temp/kodi.old.log
+++ /dev/null
@@ -1,357 +0,0 @@
-2020-12-13 13:53:53.373 T:2984099104 NOTICE: -----------------------------------------------------------------------
-2020-12-13 13:53:53.373 T:2984099104 NOTICE: Starting Kodi (18.9 (18.9.0) Git:20201027-6d9d93e-dirty). Platform: Linux ARM 32-bit
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: Using Debug Kodi x32 build
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: Kodi compiled 2020-11-18 by GCC 8.3.0 for Linux ARM 32-bit version 5.4.38 (328742)
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: Running on XBian 1.0 (knockout), kernel: Linux ARM 32-bit version 5.4.75+
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: FFmpeg version/source: 4.0.4-Kodi
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: Host CPU: ARMv7 Processor rev 3 (v7l), 4 cores available
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: ARM Features: Neon enabled
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://xbmc/ is mapped to: /usr/local/share/kodi
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://xbmcbin/ is mapped to: /usr/local/lib/kodi
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://xbmcbinaddons/ is mapped to: /usr/local/lib/kodi/addons
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://masterprofile/ is mapped to: /home/xbian/.kodi/userdata
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://envhome/ is mapped to: /home/xbian
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://home/ is mapped to: /home/xbian/.kodi
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://temp/ is mapped to: /home/xbian/.kodi/temp
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://logpath/ is mapped to: /home/xbian/.kodi/temp
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: The executable running is: /usr/local/lib/kodi/kodi-gbm
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: Local hostname: cuvelima
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: Log File is located: /home/xbian/.kodi/temp/kodi.log
-2020-12-13 13:53:53.374 T:2984099104 NOTICE: -----------------------------------------------------------------------
-2020-12-13 13:53:53.375 T:2984099104 INFO: loading settings
-2020-12-13 13:53:53.377 T:2984099104 NOTICE: special://profile/ is mapped to: special://masterprofile/
-2020-12-13 13:53:53.400 T:2984099104 DEBUG: CSkinSettings: no tag found
-2020-12-13 13:53:53.400 T:2984099104 NOTICE: No settings file to load (special://xbmc/system/advancedsettings.xml)
-2020-12-13 13:53:53.400 T:2984099104 NOTICE: No settings file to load (special://masterprofile/advancedsettings.xml)
-2020-12-13 13:53:53.400 T:2984099104 NOTICE: Default Video Player: VideoPlayer
-2020-12-13 13:53:53.400 T:2984099104 NOTICE: Default Audio Player: paplayer
-2020-12-13 13:53:53.400 T:2984099104 NOTICE: Disabled debug logging due to GUI setting. Level 0.
-2020-12-13 13:53:53.400 T:2984099104 NOTICE: Log level changed to "LOG_LEVEL_NORMAL"
-2020-12-13 13:53:53.401 T:2984099104 NOTICE: CMediaSourceSettings: loading media sources from special://masterprofile/sources.xml
-2020-12-13 13:53:53.534 T:2984099104 NOTICE: PulseAudio: Server not running
-2020-12-13 13:53:53.570 T:2984099104 NOTICE: Running database version Addons27
-2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.2sf v2.0.3 installed
-2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.asap v2.0.2 installed
-2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.dumb v2.0.2 installed
-2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.fluidsynth v2.1.1 installed
-2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.gme v2.0.3 installed
-2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.gsf v2.0.3 installed
-2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.modplug v2.0.3 installed
-2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.ncsf v2.0.3 installed
-2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.nosefart v2.0.2 installed
-2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.openmpt v2.0.4 installed
-2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.organya v1.2.1 installed
-2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.qsf v2.0.2 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.sidplay v1.2.2 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.snesapu v2.0.2 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.ssf v2.0.2 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.stsound v2.0.2 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.timidity v2.0.5 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.upse v2.0.2 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.usf v2.0.2 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.vgmstream v1.1.5 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.wsr v2.0.2 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audioencoder.flac v2.0.6 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audioencoder.kodi.builtin.aac v1.0.0 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audioencoder.kodi.builtin.wma v1.0.0 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audioencoder.lame v2.0.4 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audioencoder.vorbis v2.0.4 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audioencoder.wav v2.0.3 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: game.controller.default v1.0.8 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: game.controller.snes v1.0.8 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: imagedecoder.heif v1.1.0 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: imagedecoder.mpo v1.1.2 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: imagedecoder.raw v2.1.2 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: inputstream.adaptive v2.4.6 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: inputstream.rtmp v2.0.9 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: kodi.binary.global.audioengine v1.0.1 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: kodi.binary.global.filesystem v1.0.2 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: kodi.binary.global.general v1.0.3 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: kodi.binary.global.gui v5.12.0 installed
-2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: kodi.binary.global.main v1.0.14 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.global.network v1.0.0 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.audiodecoder v2.0.0 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.audioencoder v2.0.0 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.game v1.1.0 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.imagedecoder v2.0.0 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.inputstream v2.0.8 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.peripheral v1.3.7 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.pvr v5.10.3 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.screensaver v2.0.0 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.vfs v2.0.0 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.videocodec v1.0.1 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.visualization v2.0.1 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.resource v1.0.0 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.album.universal v3.1.3 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.artists.universal v4.3.3 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.common.allmusic.com v3.2.2 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.common.fanart.tv v3.6.3 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.common.imdb.com v3.1.6 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.common.musicbrainz.org v2.2.4 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.common.theaudiodb.com v2.0.3 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.common.themoviedb.org v3.2.12 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.local v1.0.0 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.themoviedb.org v5.2.5 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.tvshows.themoviedb.org v3.5.11 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: peripheral.joystick v1.4.9 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: peripheral.xarcade v1.1.0 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: plugin.video.arteplussept v1.0.2 installed
-2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: plugin.video.francetv v2.0.0 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: plugin.video.vstream v0.8.3 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: plugin.xbianconfig v18.0.1 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.argustv v3.5.6 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.dvblink v4.7.3 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.dvbviewer v3.7.13 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.filmon v2.4.6 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.hdhomerun v3.5.1 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.hts v4.4.21 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.iptvarchive v3.7.2 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.iptvsimple v3.9.8 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.mediaportal.tvserver v3.5.19 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.mythtv v5.10.19 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.nextpvr v3.3.21 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.njoy v3.4.3 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.octonet v0.7.1 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.pctv v2.4.7 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.stalker v3.4.10 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.teleboy v18.2.3 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.vbox v4.7.0 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.vdr.vnsi v3.6.4 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.vuplus v3.28.9 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.wmc v2.4.6 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.zattoo v18.1.21 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: repository.superrepo.org.gotham.all v0.5.206 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: repository.vstream v0.0.4 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: repository.xbmc.org v3.1.6 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: resource.images.weathericons.default v1.1.8 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: resource.language.en_gb v2.0.1 installed
-2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: resource.language.fr_fr v9.0.24 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: resource.uisounds.kodi v1.0.0 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: screensaver.shadertoy v2.0.0 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: screensaver.xbmc.builtin.black v1.0.33 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: screensaver.xbmc.builtin.dim v1.0.59 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.beautifulsoup4 v4.6.2 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.certifi v2019.9.11 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.chardet v3.0.4 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.idna v2.8 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.inputstreamhelper v0.5.1 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.pil v1.1.7 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.pycryptodome v3.4.3 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.requests v2.22.0 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.simplejson v3.16.1 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.urllib3 v1.25.6 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.xbmcswift2 v13.0.2 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.service.xbian.upstart-bridge v2.0.2 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: service.xbmc.versioncheck v0.5.12 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: skin.estouchy v2.0.28 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: skin.estuary v2.0.27 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: vfs.libarchive v1.0.7 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: vfs.rar v2.3.2 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: vfs.sacd v1.0.4 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: vfs.sftp v1.0.6 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: visualization.shadertoy v1.2.4 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: visualization.spectrum v3.0.4 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: visualization.waveform v3.1.2 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: webinterface.default v18.x-2.4.6 installed
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: xbmc.addon v18.9 installed
-2020-12-13 13:53:53.746 T:2984099104 NOTICE: ADDON: xbmc.core v0.1.0 installed
-2020-12-13 13:53:53.746 T:2984099104 NOTICE: ADDON: xbmc.gui v5.14.0 installed
-2020-12-13 13:53:53.746 T:2984099104 NOTICE: ADDON: xbmc.json v10.3.0 installed
-2020-12-13 13:53:53.746 T:2984099104 NOTICE: ADDON: xbmc.metadata v2.1.0 installed
-2020-12-13 13:53:53.746 T:2984099104 NOTICE: ADDON: xbmc.python v2.26.0 installed
-2020-12-13 13:53:53.746 T:2984099104 NOTICE: ADDON: xbmc.webinterface v1.0.0 installed
-2020-12-13 13:53:54.073 T:2984099104 ERROR: DBus error: org.freedesktop.DBus.Error.InvalidArgs - No such property “CanSuspend”
-2020-12-13 13:53:54.073 T:2984099104 ERROR: DBus error: org.freedesktop.DBus.Error.InvalidArgs - No such property “CanHibernate”
-2020-12-13 13:53:54.100 T:2952786128 NOTICE: Found 2 Lists of Devices
-2020-12-13 13:53:54.100 T:2952786128 NOTICE: Enumerated ALSA devices:
-2020-12-13 13:53:54.100 T:2952786128 NOTICE: Device 1
-2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_deviceName : default
-2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_displayName : Default (bcm2835 HDMI 1 bcm2835 HDMI 1)
-2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_displayNameExtra:
-2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM
-2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_channels : FL, FR, BL, BR, FC, LFE, SL, SR
-2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,32000,44100,48000,64000,88200,96000,176400,192000
-2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_dataFormats : AE_FMT_S16NE,AE_FMT_S16LE,AE_FMT_U8
-2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities
-2020-12-13 13:53:54.100 T:2952786128 NOTICE: Device 2
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_deviceName : sysdefault:CARD=b1
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_displayName : bcm2835 HDMI 1
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_displayNameExtra: bcm2835 HDMI 1
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_channels : FL, FR, BL, BR, FC, LFE, SL, SR
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,32000,44100,48000,64000,88200,96000,176400,192000
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_dataFormats : AE_FMT_S16NE,AE_FMT_S16LE,AE_FMT_U8
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: Device 3
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_deviceName : sysdefault:CARD=Headphones
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_displayName : bcm2835 Headphones
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_displayNameExtra: bcm2835 Headphones
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_channels : FL, FR, BL, BR, FC, LFE, SL, SR
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,32000,44100,48000,64000,88200,96000,176400,192000
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_dataFormats : AE_FMT_S16NE,AE_FMT_S16LE,AE_FMT_U8
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: Enumerated PI devices:
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: Device 1
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_deviceName : HDMI
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_displayName : HDMI
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_displayNameExtra:
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_HDMI
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_channels : FL, FR
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,24000,32000,44100,48000,88200,96000,176400,192000
-2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_dataFormats : AE_FMT_FLOAT,AE_FMT_S32NE,AE_FMT_S16NE,AE_FMT_S32LE,AE_FMT_S16LE,AE_FMT_FLOATP,AE_FMT_S32NEP,AE_FMT_S16NEP,AE_FMT_RAW
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_streamTypes : STREAM_TYPE_AC3,STREAM_TYPE_EAC3,STREAM_TYPE_DTSHD_CORE,STREAM_TYPE_DTS_2048,STREAM_TYPE_DTS_1024,STREAM_TYPE_DTS_512
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: Device 2
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_deviceName : Analogue
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_displayName : Analogue
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_displayNameExtra:
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_channels : FL, FR
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_sampleRates : 48000
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_dataFormats : AE_FMT_FLOAT,AE_FMT_S32LE,AE_FMT_S16LE,AE_FMT_FLOATP,AE_FMT_S32NEP,AE_FMT_S16NEP
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: Device 3
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_deviceName : Both
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_displayName : HDMI and Analogue
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_displayNameExtra:
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_channels : FL, FR
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_sampleRates : 48000
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_dataFormats : AE_FMT_FLOAT,AE_FMT_S32LE,AE_FMT_S16LE,AE_FMT_FLOATP,AE_FMT_S32NEP,AE_FMT_S16NEP
-2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities
-2020-12-13 13:53:54.379 T:2984099104 NOTICE: Raspberry PI firmware version: Jul 13 2020 13:56:29
- Copyright (c) 2012 Broadcom
- version adcebbdb7b415c623931e80795ba3bae68dcc4fa (clean) (release) (start_x)
-2020-12-13 13:53:54.379 T:2984099104 NOTICE: ARM mem: 704MB GPU mem: 320MB MPG2:0 WVC1:0
-2020-12-13 13:53:54.379 T:2984099104 NOTICE: cache.memorysize: 20MB libass.cache: 0MB
-2020-12-13 13:53:54.379 T:2984099104 NOTICE: Config:
- arm_freq=1500
- audio_pwm_mode=514
- config_hdmi_boost=5
- core_freq=500
- core_freq_min=200
- disable_commandline_tags=2
- disable_l2cache=1
- disable_overscan=1
- disable_splash=1
- display_hdmi_rotate=-1
- display_lcd_rotate=-1
- enable_gic=1
- force_eeprom_read=1
- force_pwm_open=1
- framebuffer_ignore_alpha=1
- framebuffer_swap=1
- gpu_freq=500
- gpu_freq_min=250
- init_uart_clock=0x2dc6c00
- initial_turbo=3
- lcd_framerate=60
- mask_gpu_interrupt0=1024
- mask_gpu_interrupt1=0x10000
- max_framebuffers=2
- over_voltage_avs=-20000
- pause_burst_frames=1
- program_serial_random=1
- total_mem=8192
- hdmi_force_cec_address:0=65535
- hdmi_force_cec_address:1=65535
- hdmi_ignore_cec_init:0=1
- hdmi_pixel_freq_limit:0=0x11e1a300
- hdmi_pixel_freq_limit:1=0x11e1a300
-2020-12-13 13:53:54.380 T:2984099104 NOTICE: Config:
- decode_MPG2=0x00000000
- decode_WVC1=0x00000000
- device_tree=-
- overlay_prefix=overlays/
- hdmi_cvt:0=
- hdmi_cvt:1=
- hdmi_edid_filename:0=
- hdmi_edid_filename:1=
- hdmi_timings:0=
- hdmi_timings:1=
-2020-12-13 13:53:55.057 T:2984099104 WARNING: CDRMUtils::FindPlane - could not find plane
-2020-12-13 13:53:55.058 T:2984099104 WARNING: CDRMUtils::InitDrm - failed to set drm master, will try to authorize instead: Permission denied
-2020-12-13 13:53:55.058 T:2984099104 NOTICE: CDRMUtils::InitDrm - successfully authorized drm magic
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1920x1080 with 1920x1080 @ 60.000000 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1920x1080 with 1920x1080 @ 59.940063 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1920x1080 with 1920x1080i @ 60.000000 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1920x1080 with 1920x1080i @ 59.940063 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1920x1080 with 1920x1080 @ 50.000000 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1920x1080 with 1920x1080i @ 50.000000 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1280x768 with 1280x768 @ 60.000000 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1280x720 with 1280x720 @ 60.000000 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1280x720 with 1280x720 @ 59.940063 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1280x720 with 1280x720 @ 50.000000 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1024x768 with 1024x768 @ 75.000000 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1024x768 with 1024x768 @ 70.000000 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1024x768 with 1024x768 @ 60.000000 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 800x600 with 800x600 @ 75.000000 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 800x600 with 800x600 @ 72.000000 Hz
-2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 800x600 with 800x600 @ 60.000000 Hz
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 800x600 with 800x600 @ 56.000000 Hz
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x576 with 720x576 @ 50.000000 Hz
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x576 with 720x576i @ 50.000000 Hz
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x480 with 720x480 @ 59.940063 Hz
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x480 with 720x480 @ 60.000000 Hz
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x480 with 720x480i @ 59.940063 Hz
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x480 with 720x480i @ 60.000000 Hz
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 640x480 with 640x480 @ 75.000000 Hz
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 640x480 with 640x480 @ 73.000000 Hz
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 640x480 with 640x480 @ 60.000000 Hz
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Previous line repeats 1 times.
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x400 with 720x400 @ 70.000000 Hz
-2020-12-13 13:53:57.083 T:2984099104 NOTICE: EGL_VERSION = 1.4
-2020-12-13 13:53:57.083 T:2984099104 NOTICE: EGL_VENDOR = Mesa Project
-2020-12-13 13:53:57.083 T:2984099104 NOTICE: EGL_EXTENSIONS = EGL_EXT_buffer_age EGL_EXT_image_dma_buf_import EGL_EXT_image_dma_buf_import_modifiers EGL_KHR_cl_event2 EGL_KHR_config_attribs EGL_KHR_create_context EGL_KHR_create_context_no_error EGL_KHR_fence_sync EGL_KHR_get_all_proc_addresses EGL_KHR_gl_colorspace EGL_KHR_gl_renderbuffer_image EGL_KHR_gl_texture_2D_image EGL_KHR_gl_texture_3D_image EGL_KHR_gl_texture_cubemap_image EGL_KHR_image EGL_KHR_image_base EGL_KHR_image_pixmap EGL_KHR_no_config_context EGL_KHR_reusable_sync EGL_KHR_surfaceless_context EGL_EXT_pixel_format_float EGL_KHR_wait_sync EGL_MESA_configless_context EGL_MESA_drm_image EGL_MESA_image_dma_buf_export EGL_WL_bind_wayland_display
-2020-12-13 13:53:57.083 T:2984099104 NOTICE: EGL_CLIENT_EXTENSIONS = EGL_EXT_device_base EGL_EXT_device_enumeration EGL_EXT_device_query EGL_EXT_platform_base EGL_KHR_client_get_all_proc_addresses EGL_EXT_client_extensions EGL_KHR_debug EGL_EXT_platform_wayland EGL_EXT_platform_x11 EGL_MESA_platform_gbm EGL_MESA_platform_surfaceless
-2020-12-13 13:53:57.087 T:2984099104 NOTICE: Checking resolution 16
-2020-12-13 13:53:57.087 T:2984099104 WARNING: CGBMUtils::DestroySurface - surface already destroyed
-2020-12-13 13:53:57.134 T:2984099104 NOTICE: GL_VENDOR = Broadcom
-2020-12-13 13:53:57.134 T:2984099104 NOTICE: GL_RENDERER = V3D 4.2
-2020-12-13 13:53:57.134 T:2984099104 NOTICE: GL_VERSION = OpenGL ES 3.0 Mesa 19.1.7 (git-b9d7244035)
-2020-12-13 13:53:57.134 T:2984099104 NOTICE: GL_SHADING_LANGUAGE_VERSION = OpenGL ES GLSL ES 3.00
-2020-12-13 13:53:57.135 T:2984099104 NOTICE: GL_EXTENSIONS = GL_EXT_blend_minmax GL_EXT_multi_draw_arrays GL_EXT_texture_format_BGRA8888 GL_OES_compressed_ETC1_RGB8_texture GL_OES_depth24 GL_OES_element_index_uint GL_OES_fbo_render_mipmap GL_OES_mapbuffer GL_OES_rgb8_rgba8 GL_OES_standard_derivatives GL_OES_stencil8 GL_OES_texture_3D GL_OES_texture_float GL_OES_texture_half_float GL_OES_texture_half_float_linear GL_OES_texture_npot GL_OES_vertex_half_float GL_EXT_texture_sRGB_decode GL_OES_EGL_image GL_OES_depth_texture GL_OES_packed_depth_stencil GL_EXT_texture_type_2_10_10_10_REV GL_OES_get_program_binary GL_APPLE_texture_max_level GL_EXT_discard_framebuffer GL_EXT_read_format_bgra GL_EXT_frag_depth GL_NV_fbo_color_attachments GL_OES_EGL_image_external GL_OES_EGL_sync GL_OES_vertex_array_object GL_EXT_occlusion_query_boolean GL_EXT_texture_rg GL_EXT_unpack_subimage GL_NV_draw_buffers GL_NV_read_buffer GL_NV_read_depth GL_NV_read_depth_stencil GL_NV_read_stencil GL_EXT_draw_buffers GL_EXT_map_buffer_range GL_KHR_debug GL_KHR_texture_compression_astc_ldr GL_OES_depth_texture_cube_map GL_OES_required_internalformat GL_OES_surfaceless_context GL_EXT_color_buffer_float GL_EXT_sRGB_write_control GL_EXT_separate_shader_objects GL_EXT_shader_integer_mix GL_EXT_base_instance GL_EXT_compressed_ETC1_RGB8_sub_texture GL_EXT_draw_elements_base_vertex GL_EXT_texture_border_clamp GL_KHR_context_flush_control GL_OES_draw_elements_base_vertex GL_OES_texture_border_clamp GL_OES_texture_stencil8 GL_EXT_float_blend GL_KHR_no_error GL_KHR_texture_compression_astc_sliced_3d GL_OES_EGL_image_external_essl3 GL_MESA_shader_integer_functions GL_KHR_parallel_shader_compile GL_EXT_texture_query_lod
-2020-12-13 13:53:58.071 T:2984099104 WARNING: Repository has MD5 hashes enabled - this hash function is broken and will only guard against unintentional data corruption
-2020-12-13 13:53:58.071 T:2984099104 WARNING: Repository add-on repository.superrepo.org.gotham.all uses plain HTTP for add-on downloads in path http://redirect.superrepo.org/v5/addons/ - this is insecure and will make your Kodi installation vulnerable to attacks if enabled!
-2020-12-13 13:53:58.277 T:2802258128 NOTICE: Running database version Addons27
-2020-12-13 13:53:58.279 T:2802258128 NOTICE: Running database version ViewModes6
-2020-12-13 13:53:58.281 T:2802258128 NOTICE: Running database version Textures13
-2020-12-13 13:53:58.287 T:2802258128 NOTICE: Running database version MyMusic72
-2020-12-13 13:53:58.297 T:2802258128 NOTICE: Running database version MyVideos116
-2020-12-13 13:53:58.301 T:2802258128 NOTICE: Running database version TV32
-2020-12-13 13:53:58.303 T:2802258128 NOTICE: Running database version Epg12
-2020-12-13 13:53:58.321 T:2984099104 NOTICE: start dvd mediatype detection
-2020-12-13 13:53:58.376 T:2984099104 NOTICE: load skin from: /usr/local/share/kodi/addons/skin.estuary (version: 2.0.27)
-2020-12-13 13:53:58.845 T:2802258128 WARNING: Repository has MD5 hashes enabled - this hash function is broken and will only guard against unintentional data corruption
-2020-12-13 13:53:58.845 T:2802258128 WARNING: Repository add-on repository.superrepo.org.gotham.all uses plain HTTP for add-on downloads in path http://redirect.superrepo.org/v5/addons/ - this is insecure and will make your Kodi installation vulnerable to attacks if enabled!
-2020-12-13 13:53:58.852 T:2984099104 WARNING: JSONRPC: Could not parse type "Setting.Details.SettingList"
-2020-12-13 13:53:58.924 T:2984099104 NOTICE: Register - new keyboard device registered on application->keyboard: Clavier (0000:0000)
-2020-12-13 13:53:58.925 T:2984099104 NOTICE: Register - new mouse device registered on application->mouse: Souris (0000:0000)
-2020-12-13 13:53:58.929 T:2984099104 NOTICE: Loading player core factory settings from special://xbmc/system/playercorefactory.xml.
-2020-12-13 13:53:58.929 T:2984099104 NOTICE: Loaded playercorefactory configuration
-2020-12-13 13:53:58.930 T:2984099104 NOTICE: Loading player core factory settings from special://masterprofile/playercorefactory.xml.
-2020-12-13 13:53:58.930 T:2984099104 NOTICE: special://masterprofile/playercorefactory.xml does not exist. Skipping.
-2020-12-13 13:53:58.963 T:2984099104 NOTICE: initialize done
-2020-12-13 13:53:58.963 T:2984099104 NOTICE: XBian: notifying Upstart that I'm well
-2020-12-13 13:53:59.127 T:2984099104 NOTICE: Running the application...
-2020-12-13 13:53:59.133 T:2984099104 NOTICE: starting zeroconf publishing
-2020-12-13 13:53:59.135 T:2984099104 NOTICE: CWebServer[8080]: Started
-2020-12-13 13:53:59.140 T:2655494352 NOTICE: ES: Starting UDP Event server on port 9777
-2020-12-13 13:53:59.141 T:2655494352 NOTICE: UDP: Listening on port 9777 (ipv6 : false)
-2020-12-13 13:53:59.304 T:2749124816 NOTICE: Register - new cec device registered on cec->RPI: CEC Adapter (2708:1001)
-2020-12-13 14:18:55.251 T:2984099104 NOTICE: Stopping player
-2020-12-13 14:18:55.251 T:2984099104 NOTICE: Storing total System Uptime
-2020-12-13 14:18:55.251 T:2984099104 NOTICE: Saving settings
-2020-12-13 14:18:55.255 T:2984099104 NOTICE: Saving skin settings
-2020-12-13 14:18:55.358 T:2984099104 NOTICE: Stopping all
-2020-12-13 14:18:55.358 T:2984099104 NOTICE: ES: Stopping event server
-2020-12-13 14:18:55.358 T:2984099104 NOTICE: stopping zeroconf publishing
-2020-12-13 14:18:55.364 T:2984099104 NOTICE: CWebServer[8080]: Stopped
-2020-12-13 14:18:55.758 T:2655494352 NOTICE: ES: UDP Event server stopped
-2020-12-13 14:18:55.770 T:2984099104 NOTICE: stop dvd detect media
-2020-12-13 14:18:56.161 T:2984099104 NOTICE: Application stopped
-2020-12-13 14:18:56.380 T:2984099104 ERROR: DBus error: org.freedesktop.DBus.Error.IOError - Input/output error
-2020-12-13 14:18:56.380 T:2984099104 NOTICE: XBApplicationEx: destroying...
-2020-12-13 14:18:58.510 T:2984099104 NOTICE: unload skin
-2020-12-13 14:18:58.552 T:2984099104 NOTICE: unload sections
-2020-12-13 14:18:59.119 T:2984099104 NOTICE: XBApplicationEx: application stopped!
diff --git a/.install/.kodi/userdata/Database/Addons27.db b/.install/.kodi/userdata/Database/Addons27.db
index 76ee780cb..f186a9319 100644
Binary files a/.install/.kodi/userdata/Database/Addons27.db and b/.install/.kodi/userdata/Database/Addons27.db differ
diff --git a/.install/.kodi/userdata/Database/MyVideos116.db b/.install/.kodi/userdata/Database/MyVideos116.db
index 30c384eb0..5b4b0a219 100644
Binary files a/.install/.kodi/userdata/Database/MyVideos116.db and b/.install/.kodi/userdata/Database/MyVideos116.db differ
diff --git a/.install/.kodi/userdata/Database/Textures13.db b/.install/.kodi/userdata/Database/Textures13.db
deleted file mode 100644
index a8b9edd3e..000000000
Binary files a/.install/.kodi/userdata/Database/Textures13.db and /dev/null differ
diff --git a/.install/.kodi/userdata/Database/ViewModes6.db b/.install/.kodi/userdata/Database/ViewModes6.db
index b13fd464a..9fb1e6227 100644
Binary files a/.install/.kodi/userdata/Database/ViewModes6.db and b/.install/.kodi/userdata/Database/ViewModes6.db differ
diff --git a/.install/.kodi/userdata/Thumbnails/0/0562359b.png b/.install/.kodi/userdata/Thumbnails/0/0562359b.png
deleted file mode 100644
index 7f97c85ae..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/0/0562359b.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/0/09d2e90e.jpg b/.install/.kodi/userdata/Thumbnails/0/09d2e90e.jpg
deleted file mode 100644
index 4248d032a..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/0/09d2e90e.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/0/0c782ba5.png b/.install/.kodi/userdata/Thumbnails/0/0c782ba5.png
deleted file mode 100644
index fd5371d48..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/0/0c782ba5.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/0/0da69cb4.jpg b/.install/.kodi/userdata/Thumbnails/0/0da69cb4.jpg
deleted file mode 100644
index 5f4cffbe9..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/0/0da69cb4.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/1/163488dd.png b/.install/.kodi/userdata/Thumbnails/1/163488dd.png
deleted file mode 100644
index 982ed25fd..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/1/163488dd.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/1/1dc90357.png b/.install/.kodi/userdata/Thumbnails/1/1dc90357.png
deleted file mode 100644
index e70974b9f..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/1/1dc90357.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/1/1e8cabe0.png b/.install/.kodi/userdata/Thumbnails/1/1e8cabe0.png
deleted file mode 100644
index 65e92b6df..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/1/1e8cabe0.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/1/1fe20bad.png b/.install/.kodi/userdata/Thumbnails/1/1fe20bad.png
deleted file mode 100644
index 8283731f9..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/1/1fe20bad.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/2/2218c381.jpg b/.install/.kodi/userdata/Thumbnails/2/2218c381.jpg
deleted file mode 100644
index 9a40d887a..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/2/2218c381.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/2/228846d1.png b/.install/.kodi/userdata/Thumbnails/2/228846d1.png
deleted file mode 100644
index 80c464cd2..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/2/228846d1.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/2/230b0f94.jpg b/.install/.kodi/userdata/Thumbnails/2/230b0f94.jpg
deleted file mode 100644
index d12157b60..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/2/230b0f94.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/3/33e341a9.jpg b/.install/.kodi/userdata/Thumbnails/3/33e341a9.jpg
deleted file mode 100644
index fa465d35e..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/3/33e341a9.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/3/3894fe5d.jpg b/.install/.kodi/userdata/Thumbnails/3/3894fe5d.jpg
deleted file mode 100644
index 37834ccd1..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/3/3894fe5d.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/3/3a8ed83f.jpg b/.install/.kodi/userdata/Thumbnails/3/3a8ed83f.jpg
deleted file mode 100644
index fa465d35e..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/3/3a8ed83f.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/4/41d78f97.png b/.install/.kodi/userdata/Thumbnails/4/41d78f97.png
deleted file mode 100644
index ddb7bc329..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/4/41d78f97.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/4/48bafe5e.jpg b/.install/.kodi/userdata/Thumbnails/4/48bafe5e.jpg
deleted file mode 100644
index f79f12f74..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/4/48bafe5e.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/4/49dafe0f.jpg b/.install/.kodi/userdata/Thumbnails/4/49dafe0f.jpg
deleted file mode 100644
index 88c1f88bb..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/4/49dafe0f.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/4/4ae0fd0a.jpg b/.install/.kodi/userdata/Thumbnails/4/4ae0fd0a.jpg
deleted file mode 100644
index a4e7b77c8..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/4/4ae0fd0a.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/4/4f719d58.png b/.install/.kodi/userdata/Thumbnails/4/4f719d58.png
deleted file mode 100644
index 35ed3c447..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/4/4f719d58.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/5/52fb809f.png b/.install/.kodi/userdata/Thumbnails/5/52fb809f.png
deleted file mode 100644
index a2a87de57..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/5/52fb809f.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/5/557763a5.jpg b/.install/.kodi/userdata/Thumbnails/5/557763a5.jpg
deleted file mode 100644
index 565725bd4..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/5/557763a5.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/5/56cde9fa.jpg b/.install/.kodi/userdata/Thumbnails/5/56cde9fa.jpg
deleted file mode 100644
index a4e7b77c8..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/5/56cde9fa.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/5/57b5bfce.jpg b/.install/.kodi/userdata/Thumbnails/5/57b5bfce.jpg
deleted file mode 100644
index 6941f4415..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/5/57b5bfce.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/5/5b9d7b56.png b/.install/.kodi/userdata/Thumbnails/5/5b9d7b56.png
deleted file mode 100644
index dc940ff38..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/5/5b9d7b56.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/5/5ba4d498.jpg b/.install/.kodi/userdata/Thumbnails/5/5ba4d498.jpg
deleted file mode 100644
index 81595536e..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/5/5ba4d498.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/5/5bb33702.png b/.install/.kodi/userdata/Thumbnails/5/5bb33702.png
deleted file mode 100644
index 5e415de34..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/5/5bb33702.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/5/5bc908e2.png b/.install/.kodi/userdata/Thumbnails/5/5bc908e2.png
deleted file mode 100644
index fb7fa8db7..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/5/5bc908e2.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/5/5ce7e216.png b/.install/.kodi/userdata/Thumbnails/5/5ce7e216.png
deleted file mode 100644
index c01b316d4..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/5/5ce7e216.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/6/69ca5288.png b/.install/.kodi/userdata/Thumbnails/6/69ca5288.png
deleted file mode 100644
index d03ea5d23..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/6/69ca5288.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/6/6b3c0c32.png b/.install/.kodi/userdata/Thumbnails/6/6b3c0c32.png
deleted file mode 100644
index 06a9856ec..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/6/6b3c0c32.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/6/6fcacd67.jpg b/.install/.kodi/userdata/Thumbnails/6/6fcacd67.jpg
deleted file mode 100644
index 5f4cffbe9..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/6/6fcacd67.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/8/808a55f7.jpg b/.install/.kodi/userdata/Thumbnails/8/808a55f7.jpg
deleted file mode 100644
index 6a88ebd33..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/8/808a55f7.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/8/8977c3a3.png b/.install/.kodi/userdata/Thumbnails/8/8977c3a3.png
deleted file mode 100644
index 85e2ed447..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/8/8977c3a3.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/9/94234470.jpg b/.install/.kodi/userdata/Thumbnails/9/94234470.jpg
deleted file mode 100644
index a4e7b77c8..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/9/94234470.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/a/a5d64abb.png b/.install/.kodi/userdata/Thumbnails/a/a5d64abb.png
deleted file mode 100644
index 0f0a1ffbc..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/a/a5d64abb.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/a/a5e4a176.png b/.install/.kodi/userdata/Thumbnails/a/a5e4a176.png
deleted file mode 100644
index cc633d541..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/a/a5e4a176.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/a/aa93a2f6.jpg b/.install/.kodi/userdata/Thumbnails/a/aa93a2f6.jpg
deleted file mode 100644
index a4e7b77c8..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/a/aa93a2f6.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/a/abb05214.jpg b/.install/.kodi/userdata/Thumbnails/a/abb05214.jpg
deleted file mode 100644
index 068a773f7..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/a/abb05214.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/a/ac0efbca.png b/.install/.kodi/userdata/Thumbnails/a/ac0efbca.png
deleted file mode 100644
index c43c57b4b..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/a/ac0efbca.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/a/ac0fd81a.png b/.install/.kodi/userdata/Thumbnails/a/ac0fd81a.png
deleted file mode 100644
index b9437e41a..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/a/ac0fd81a.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/a/af1a3233.jpg b/.install/.kodi/userdata/Thumbnails/a/af1a3233.jpg
deleted file mode 100644
index a4e7b77c8..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/a/af1a3233.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/b/b44affb0.png b/.install/.kodi/userdata/Thumbnails/b/b44affb0.png
deleted file mode 100644
index 77070184b..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/b/b44affb0.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/b/b71e7b6c.png b/.install/.kodi/userdata/Thumbnails/b/b71e7b6c.png
deleted file mode 100644
index c43c57b4b..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/b/b71e7b6c.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/c/c11b8e04.jpg b/.install/.kodi/userdata/Thumbnails/c/c11b8e04.jpg
deleted file mode 100644
index 565725bd4..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/c/c11b8e04.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/c/c3592a77.png b/.install/.kodi/userdata/Thumbnails/c/c3592a77.png
deleted file mode 100644
index a094d4e36..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/c/c3592a77.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/c/c7e556fc.jpg b/.install/.kodi/userdata/Thumbnails/c/c7e556fc.jpg
deleted file mode 100644
index 15d2dae88..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/c/c7e556fc.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/c/cbda80a5.jpg b/.install/.kodi/userdata/Thumbnails/c/cbda80a5.jpg
deleted file mode 100644
index 0fdc6eb66..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/c/cbda80a5.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/c/cfd55d9e.jpg b/.install/.kodi/userdata/Thumbnails/c/cfd55d9e.jpg
deleted file mode 100644
index 37834ccd1..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/c/cfd55d9e.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/d/d12803d9.png b/.install/.kodi/userdata/Thumbnails/d/d12803d9.png
deleted file mode 100644
index 49ea80c88..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/d/d12803d9.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/d/d2c983a6.jpg b/.install/.kodi/userdata/Thumbnails/d/d2c983a6.jpg
deleted file mode 100644
index a4e7b77c8..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/d/d2c983a6.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/d/da7c3aaa.png b/.install/.kodi/userdata/Thumbnails/d/da7c3aaa.png
deleted file mode 100644
index 438a9755c..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/d/da7c3aaa.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/d/dc5a0d26.jpg b/.install/.kodi/userdata/Thumbnails/d/dc5a0d26.jpg
deleted file mode 100644
index a4e7b77c8..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/d/dc5a0d26.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/d/dd290f35.jpg b/.install/.kodi/userdata/Thumbnails/d/dd290f35.jpg
deleted file mode 100644
index a4e7b77c8..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/d/dd290f35.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/d/dea41009.jpg b/.install/.kodi/userdata/Thumbnails/d/dea41009.jpg
deleted file mode 100644
index a20b1fac6..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/d/dea41009.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/e/e2a80d9a.jpg b/.install/.kodi/userdata/Thumbnails/e/e2a80d9a.jpg
deleted file mode 100644
index a4e7b77c8..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/e/e2a80d9a.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/e/e47f7b27.jpg b/.install/.kodi/userdata/Thumbnails/e/e47f7b27.jpg
deleted file mode 100644
index 6a88ebd33..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/e/e47f7b27.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/e/e62daf11.png b/.install/.kodi/userdata/Thumbnails/e/e62daf11.png
deleted file mode 100644
index d76b635e5..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/e/e62daf11.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/e/e93d5c85.png b/.install/.kodi/userdata/Thumbnails/e/e93d5c85.png
deleted file mode 100644
index 0191330e6..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/e/e93d5c85.png and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/e/ec9c7b3b.jpg b/.install/.kodi/userdata/Thumbnails/e/ec9c7b3b.jpg
deleted file mode 100644
index a20b1fac6..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/e/ec9c7b3b.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/f/f7021412.jpg b/.install/.kodi/userdata/Thumbnails/f/f7021412.jpg
deleted file mode 100644
index 496696746..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/f/f7021412.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/f/f94acfb4.jpg b/.install/.kodi/userdata/Thumbnails/f/f94acfb4.jpg
deleted file mode 100644
index 439f6505e..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/f/f94acfb4.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/f/f97a02b1.jpg b/.install/.kodi/userdata/Thumbnails/f/f97a02b1.jpg
deleted file mode 100644
index 752dd19b5..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/f/f97a02b1.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/Thumbnails/f/fae14ceb.jpg b/.install/.kodi/userdata/Thumbnails/f/fae14ceb.jpg
deleted file mode 100644
index a20b1fac6..000000000
Binary files a/.install/.kodi/userdata/Thumbnails/f/fae14ceb.jpg and /dev/null differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/Captcha.raw b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Captcha.raw
new file mode 100644
index 000000000..41d27f1c6
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Captcha.raw differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_www_zt-za_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_www_zt-za_com.txt
new file mode 100644
index 000000000..3d46abfe4
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_www_zt-za_com.txt
@@ -0,0 +1 @@
+swp_token=1605043494:0cb63ecc71f6d029418e827aed3e7c9f:c8201b8ffdb3703b1d01884bce4f7584
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_zt-protect_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_zt-protect_com.txt
new file mode 100644
index 000000000..ec58db767
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_zt-protect_com.txt
@@ -0,0 +1 @@
+swp_token=1603922533:31aac141fc8c1f57a856ac4be928d2c0:067e0bd7719ed36e424bd1d672ebecdb
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_zt-protect_net.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_zt-protect_net.txt
new file mode 100644
index 000000000..535c7f72f
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_zt-protect_net.txt
@@ -0,0 +1 @@
+swp_token=1603922519:96ed2062fc796339ee60eb07e53b8a30:f904797b23b7e171c91e754aaf39ae74
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/challenge.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/challenge.png
new file mode 100644
index 000000000..ebde863e7
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/challenge.png differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_cinemegatoil_org.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_cinemegatoil_org.txt
new file mode 100644
index 000000000..eefdbe3d2
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_cinemegatoil_org.txt
@@ -0,0 +1 @@
+PHPSESSID=78e4d3599bdbe46db4d8552a0eaa5882
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_liens_free-telechargement_org.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_liens_free-telechargement_org.txt
new file mode 100644
index 000000000..d772dc358
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_liens_free-telechargement_org.txt
@@ -0,0 +1 @@
+; __cfduid=d47ef5a3629c709aa8d6405f7b7e2de881603912323;PHPSESSID=2ckoftm7qnkt1ljgjst87tfdf5;
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_wvw_zone-annuaire_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_wvw_zone-annuaire_com.txt
new file mode 100644
index 000000000..e2bfeebd6
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_wvw_zone-annuaire_com.txt
@@ -0,0 +1 @@
+PHPSESSID=78bc92d76f997ed1ec958b412cebb4f7;cf_clearance=3e9d1b257422bb4b4d348e6bcb36820a1409763c-1587067237-0-150;
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_wwv_zone-annuaire_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_wwv_zone-annuaire_com.txt
new file mode 100644
index 000000000..80e2e2f10
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_wwv_zone-annuaire_com.txt
@@ -0,0 +1 @@
+cf_clearance=8b2bd89b3c1f704fd5d93990e651764031dbae6f-1586961259-0-150;
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www2_zone-warez_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www2_zone-warez_com.txt
new file mode 100644
index 000000000..18a1d7732
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www2_zone-warez_com.txt
@@ -0,0 +1 @@
+cf_clearance=a4d987f691ebfc88e00e8ca2ec3a64d2e460c4d8-1587069524-0-150;PHPSESSID=f1ee2m94s8au1m6n70d1e6hn54;dle_cache=yes;
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_dl-protect1_co.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_dl-protect1_co.txt
new file mode 100644
index 000000000..a815afb8f
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_dl-protect1_co.txt
@@ -0,0 +1 @@
+cf_clearance=360d43600100367bf69621b836c7542bb1b46afe-1586420453-0-150;
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_extreme-down_ninja.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_extreme-down_ninja.txt
new file mode 100644
index 000000000..4a9195773
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_extreme-down_ninja.txt
@@ -0,0 +1 @@
+cf_clearance=497f80648d29715ba0ec64db468cf5cdc6302226-1587239340-0-150;PHPSESSID=hgktt7llree1pvfvijba52f8q1;ed_last_visit=1587246540;
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_ianimes_org.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_ianimes_org.txt
new file mode 100644
index 000000000..2ea90642f
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_ianimes_org.txt
@@ -0,0 +1 @@
+__cf_bm=1fa31255d612253a9c312090b73adfe7e0d89ecb-1587075762-1800-Aalb3+NMEpaapK0WWv4+tlNo90yVcKCNOfR3a5mleHinzRNTFz2Z43pyLcbqUdMNjrWM4VR0dQpprObT9iUP8I8=;
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_neko-sama_fr.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_neko-sama_fr.txt
new file mode 100644
index 000000000..b843a9d36
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_neko-sama_fr.txt
@@ -0,0 +1 @@
+cf_clearance=5d9a052ef4b923889d250eea819e7e8b25300f59-1587066184-0-150;
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_zt-za_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_zt-za_com.txt
new file mode 100644
index 000000000..0f9ef5dae
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_zt-za_com.txt
@@ -0,0 +1 @@
+swp_token=1607198571:9341cdb1e538727625702349a5a0ddd7:4044fb8c7c44a4890c67a9c17c77fbc4
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_zustream_biz.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_zustream_biz.txt
new file mode 100644
index 000000000..f59946310
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_zustream_biz.txt
@@ -0,0 +1 @@
+starstruck_0e1ee930605fd5ae5b10792311eb44d8=369447d3649ec249d188af0c95195c03;cf_clearance=fab6f52e4affbf45915501f908ce6640ba594d58-1587239427-0-150;
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_zt-protect_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_zt-protect_com.txt
new file mode 100644
index 000000000..3d682430e
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_zt-protect_com.txt
@@ -0,0 +1 @@
+zt_protect_session=eyJpdiI6Im9uclVNNFRPZVdHRUUyZE1KZVVWbUE9PSIsInZhbHVlIjoic285TElyYTR2ZkI3OThZaENXbGxjTUdrNXFlZWR2TWpmOGFaNW9wTmVORGZyRGEySDlCU0VUWTdPYnFKTXRMaSIsIm1hYyI6ImJiYzkzYTkwMjY1MzMwNjEzNTgyMmUxZTk3MzUxNjQ2ZTNkZGU0MzJlNGU0OGY4OTdiNTM5MDEyZTI2OWUyODgifQ%3D%3D;XSRF-TOKEN=eyJpdiI6InZMd2xmblJLRllBUEhCcit0dnY0MHc9PSIsInZhbHVlIjoieGpOSjlmTGhKemExdGkwNENtR3UrVm5UWEhKWHBUXC9Qc01tN1ZMMTFHcEo0UjFHUGZCZ01JNEVjZDQ1Z3ozNEoiLCJtYWMiOiJjZTgyM2MxZDkxN2U4ZTNjOWU1MzAxZDczNmQ5ZjU1N2U2M2VmMGQwMTQ1MzE3ZDVmNDJiM2ZhZTE5NDViZmZhIn0%3D;
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/settings.xml b/.install/.kodi/userdata/addon_data/plugin.video.vstream/settings.xml
index 362cd5b5e..b21e1f24c 100644
--- a/.install/.kodi/userdata/addon_data/plugin.video.vstream/settings.xml
+++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/settings.xml
@@ -1,15 +1,15 @@
500
false
- 92ab39516970ab9d86396866456ec9b6
+ e275e5c42fc0f498521343e1e19a4479
w1280
false
true
lightcoral
50
-
- false
+ /home/xbian/astroport/films/
+ true
false
1
true
@@ -51,17 +51,21 @@
special://userdata/addon_data/plugin.video.vstream/Films
special://userdata/addon_data/plugin.video.vstream/Series
0
- false
+ true
500
- Films
- Animes
-
-
+ FASTRXBIAN
+ AASTRXBIAN
+
+ films
+ animes
+ /home/xbian/
+ special://userdata/addon_data/plugin.video.vstream/Enregistrement
1
2
true
true
+ true
true
true
true
@@ -70,6 +74,7 @@
true
true
true
+ true
true
true
true
@@ -79,6 +84,7 @@
true
true
true
+ true
true
true
true
@@ -95,10 +101,13 @@
true
true
true
+ true
true
true
true
true
+ true
+ true
true
true
true
@@ -108,37 +117,55 @@
true
true
true
+ true
+ true
true
true
+ true
+ true
true
true
+ true
+ true
true
true
true
+ true
true
true
+ true
true
+ true
true
+ true
+ true
true
true
+ true
+ true
true
true
true
+ true
true
+ true
true
true
true
true
true
+ true
+ true
+ true
true
w342
500
-
+ 0.8.3
- 2020-12-12 02:55:53.502862
+ 2020-12-14 22:22:54.593849
0.8.3
false
-
+ https://www.zone-warez.com/
@@ -162,5 +189,5 @@
false
500
-
+ https://www.zt-za.com/
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test0.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test0.png
new file mode 100644
index 000000000..7474ae799
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test0.png differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test1.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test1.png
new file mode 100644
index 000000000..0085cc3f2
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test1.png differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test2.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test2.png
new file mode 100644
index 000000000..0eae90034
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test2.png differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test3.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test3.png
new file mode 100644
index 000000000..d81c87eac
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test3.png differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test4.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test4.png
new file mode 100644
index 000000000..8862ca5f1
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test4.png differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test5.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test5.png
new file mode 100644
index 000000000..c558fad99
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test5.png differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test6.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test6.png
new file mode 100644
index 000000000..7670bab38
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test6.png differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test7.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test7.png
new file mode 100644
index 000000000..9bd52555b
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test7.png differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test8.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test8.png
new file mode 100644
index 000000000..0b3a9195c
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test8.png differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/video_cache.db b/.install/.kodi/userdata/addon_data/plugin.video.vstream/video_cache.db
new file mode 100644
index 000000000..dc9b08c83
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/video_cache.db differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/vstream.db b/.install/.kodi/userdata/addon_data/plugin.video.vstream/vstream.db
index 4caa54a00..21bcd7036 100644
Binary files a/.install/.kodi/userdata/addon_data/plugin.video.vstream/vstream.db and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/vstream.db differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.youtube/access_manager.json b/.install/.kodi/userdata/addon_data/plugin.video.youtube/access_manager.json
new file mode 100644
index 000000000..704a7caab
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.youtube/access_manager.json
@@ -0,0 +1,19 @@
+{
+ "access_manager": {
+ "current_user": "0",
+ "developers": {},
+ "last_origin": "plugin.video.youtube",
+ "users": {
+ "0": {
+ "access_token": "",
+ "id": "af085b4b77e247e5b4ba97d958567d9f",
+ "last_key_hash": "94aa8ff1499c32985b71113ea0b99b60",
+ "name": "Default",
+ "refresh_token": "",
+ "token_expires": -1,
+ "watch_history": "HL",
+ "watch_later": " WL"
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.youtube/api_keys.json b/.install/.kodi/userdata/addon_data/plugin.video.youtube/api_keys.json
new file mode 100644
index 000000000..a242a0393
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.youtube/api_keys.json
@@ -0,0 +1,10 @@
+{
+ "keys": {
+ "developer": {},
+ "personal": {
+ "api_key": "",
+ "client_id": "",
+ "client_secret": ""
+ }
+ }
+}
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/cache.sqlite b/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/cache.sqlite
new file mode 100644
index 000000000..1bb496b63
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/cache.sqlite differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/data_cache.sqlite b/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/data_cache.sqlite
new file mode 100644
index 000000000..06a99d3c0
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/data_cache.sqlite differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/search.sqlite b/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/search.sqlite
new file mode 100644
index 000000000..cb89c44b5
Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/search.sqlite differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.video.youtube/settings.xml b/.install/.kodi/userdata/addon_data/plugin.video.youtube/settings.xml
new file mode 100644
index 000000000..6d85e087d
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.video.youtube/settings.xml
@@ -0,0 +1,102 @@
+
+
+ 0
+ true
+ false
+ false
+ 10
+ 9
+ true
+
+
+ 0.0.0.0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ false
+ false
+ true
+ 50152
+ 8
+ true
+ 85
+ false
+
+ 0
+ 10
+ false
+
+ false
+ 0
+ false
+ 1
+
+
+
+
+ 3
+ false
+ false
+
+ true
+ true
+ false
+
+
+
+
+ true
+ false
+
+ true
+ true
+ true
+
+ true
+ true
+ true
+ true
+ true
+ true
+ false
+ true
+ true
+ true
+ false
+ false
+ true
+ true
+ true
+ true
+ true
+ true
+ true
+ true
+ true
+
+ true
+
+ fr
+ 43.6046,1.4451
+ 500
+ true
+ false
+ false
+ false
+ FR
+ false
+ false
+ true
+
diff --git a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/backuphome b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/backuphome
index 97afcb005..e69de29bb 100644
--- a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/backuphome
+++ b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/backuphome
@@ -1 +0,0 @@
-2020-12-13-1428
diff --git a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/cache.db b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/cache.db
index 65bd225c3..185a5c628 100644
Binary files a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/cache.db and b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/cache.db differ
diff --git a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/hide.backuphome b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/hide.backuphome
new file mode 100644
index 000000000..c3f29566b
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/hide.backuphome
@@ -0,0 +1,2 @@
+I01
+.
\ No newline at end of file
diff --git a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/xbiancopy b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/xbiancopy
index e69de29bb..1b8447b4a 100644
--- a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/xbiancopy
+++ b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/xbiancopy
@@ -0,0 +1 @@
+2020-12-16-1552
diff --git a/.install/.kodi/userdata/addon_data/service.xbmc.versioncheck/settings.xml b/.install/.kodi/userdata/addon_data/service.xbmc.versioncheck/settings.xml
new file mode 100644
index 000000000..cccd6e845
--- /dev/null
+++ b/.install/.kodi/userdata/addon_data/service.xbmc.versioncheck/settings.xml
@@ -0,0 +1,6 @@
+
+ 18.9 stable
+ false
+ false
+ true
+
diff --git a/.install/.kodi/userdata/addon_data/skin.estuary/settings.xml b/.install/.kodi/userdata/addon_data/skin.estuary/settings.xml
index cb2abfea8..2e550c7e8 100644
--- a/.install/.kodi/userdata/addon_data/skin.estuary/settings.xml
+++ b/.install/.kodi/userdata/addon_data/skin.estuary/settings.xml
@@ -1,60 +1,68 @@
- false
-
- false
- false
-
-
-
- false
- false
-
- false
-
-
-
-
- false
- false
-
- false
- false
false
+
+
+
+ false
+ false
+
+ false
+ false
+ false
+
+
+ false
+ false
+
+
-
+
+ false
+ false
+
+ false
+ false
+ false
+
+
+ inetd
+ File
- Reloading values for wlan0
-
-
-
- DHCP
-
- Click to load...
- 5.4.75
- Click to load...
- Unknown
- Cliquer pour charger
- requis
- File
- Cliquer pour charger
- Click to load...
- Cliquer pour charger
-
- DHCP
-
-
- eth0
- Cliquer pour charger
+ wlan2
+ Click to load...
+
+ 1
+
+
Cliquer pour charger
Cliquer pour charger
-
- daily
- daily
- daily
- Device
- daily
- inetd
-
+ Cliquer pour charger
+ File
+ Cliquer pour charger
+
+ Cliquer pour charger
+
+ Cliquer pour charger
+
+ requis
+ inetd
+ DHCP
+ DHCP
+
+ 5.4.75
+
+
+ daily
+ requis
+ daily
+
+ Unknown
+ daily
+ Recherche des réseaux disponibles
+ daily
+
+
+
diff --git a/.install/.kodi/userdata/favourites.xml b/.install/.kodi/userdata/favourites.xml
index 2be744fbb..c32380fde 100644
--- a/.install/.kodi/userdata/favourites.xml
+++ b/.install/.kodi/userdata/favourites.xml
@@ -1 +1,3 @@
-
+
+ ActivateWindow(10025,"plugin://plugin.video.vstream/?function=load&sFav=load&site=astroport&siteUrl=http%3a%2f%2fvenom&title=ASTROPORT%20_PROFIL_%20(_LOGIN_)%20(_MDP_)",return)
+
diff --git a/.install/.kodi/userdata/guisettings.xml b/.install/.kodi/userdata/guisettings.xml
index cd12437af..3890d1553 100644
--- a/.install/.kodi/userdata/guisettings.xml
+++ b/.install/.kodi/userdata/guisettings.xml
@@ -12,7 +12,7 @@
false
false
2
- ALSA:default
+ PI:Both
2
2
false
@@ -77,7 +77,7 @@
true
fr
false
- English QWERTY
+ French AZERTY
mediadefault
DEFAULT
France
@@ -89,8 +89,8 @@
original
regional
regional
- Etc/UTC
-
+ Europe/Paris
+ France
regional
false
Default
@@ -156,7 +156,7 @@
0
false
- 14
+ 17
true
true
true
@@ -209,7 +209,7 @@
false
true
XBian
- f884d8e4-f4f7-43da-9dc0-451d91b436d6
+ ca06be1c-dd53-4909-aef9-1cae7fdcf17a
false
25
true
@@ -226,7 +226,7 @@
false
true
- 8080
+ 8181
xbmc
webinterface.default
@@ -249,7 +249,7 @@
false
arial.ttf
28
- English
+ English,French
false
false
@@ -442,12 +442,12 @@
- 3
+ 2
0
true
- 1591
+ 53439
@@ -490,13 +490,13 @@
0
0
- false
+ true
false
0
false
- 1.000000
+ 0.762222
diff --git a/.install/.kodi/userdata/sources.xml b/.install/.kodi/userdata/sources.xml
index 686f393e1..40012dcb4 100644
--- a/.install/.kodi/userdata/sources.xml
+++ b/.install/.kodi/userdata/sources.xml
@@ -20,7 +20,7 @@
SuperRepo.org Virtual Disk
- http://srp.nu/jarvis/
+ http://srp.nu/leia/
true