diff --git a/.install/.kodi/addons/packages/metadata.album.universal-3.1.2.zip b/.install/.kodi/addons/packages/metadata.album.universal-3.1.2.zip new file mode 100644 index 000000000..472a06142 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.album.universal-3.1.2.zip differ diff --git a/.install/.kodi/addons/packages/metadata.artists.universal-4.3.2.zip b/.install/.kodi/addons/packages/metadata.artists.universal-4.3.2.zip new file mode 100644 index 000000000..00a19b475 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.artists.universal-4.3.2.zip differ diff --git a/.install/.kodi/addons/packages/metadata.common.allmusic.com-3.2.1.zip b/.install/.kodi/addons/packages/metadata.common.allmusic.com-3.2.1.zip new file mode 100644 index 000000000..296099384 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.common.allmusic.com-3.2.1.zip differ diff --git a/.install/.kodi/addons/packages/metadata.common.fanart.tv-3.6.2.zip b/.install/.kodi/addons/packages/metadata.common.fanart.tv-3.6.2.zip new file mode 100644 index 000000000..7b493a7d8 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.common.fanart.tv-3.6.2.zip differ diff --git a/.install/.kodi/addons/packages/metadata.common.imdb.com-3.1.5.zip b/.install/.kodi/addons/packages/metadata.common.imdb.com-3.1.5.zip new file mode 100644 index 000000000..6531d95c9 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.common.imdb.com-3.1.5.zip differ diff --git a/.install/.kodi/addons/packages/metadata.common.musicbrainz.org-2.2.2.zip b/.install/.kodi/addons/packages/metadata.common.musicbrainz.org-2.2.2.zip new file mode 100644 index 000000000..df8642bbe Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.common.musicbrainz.org-2.2.2.zip differ diff --git a/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.1.10.zip b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.1.10.zip new file mode 
100644 index 000000000..baaa3cc9f Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.1.10.zip differ diff --git a/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.1.zip b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.1.zip new file mode 100644 index 000000000..1f86abee7 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.1.zip differ diff --git a/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.11.zip b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.11.zip new file mode 100644 index 000000000..f6c510ad8 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.11.zip differ diff --git a/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.2.zip b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.2.zip new file mode 100644 index 000000000..e08039ec7 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.2.zip differ diff --git a/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.3.zip b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.3.zip new file mode 100644 index 000000000..d644ca946 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.3.zip differ diff --git a/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.5.zip b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.5.zip new file mode 100644 index 000000000..267708e94 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.5.zip differ diff --git a/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.6.zip b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.6.zip new file mode 100644 index 000000000..3adcd83ac Binary files /dev/null and 
b/.install/.kodi/addons/packages/metadata.common.themoviedb.org-3.2.6.zip differ diff --git a/.install/.kodi/addons/packages/metadata.themoviedb.org-5.1.10.zip b/.install/.kodi/addons/packages/metadata.themoviedb.org-5.1.10.zip new file mode 100644 index 000000000..0b5fd51e1 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.themoviedb.org-5.1.10.zip differ diff --git a/.install/.kodi/addons/packages/metadata.themoviedb.org-5.2.0.zip b/.install/.kodi/addons/packages/metadata.themoviedb.org-5.2.0.zip new file mode 100644 index 000000000..01e8923fc Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.themoviedb.org-5.2.0.zip differ diff --git a/.install/.kodi/addons/packages/metadata.themoviedb.org-5.2.1.zip b/.install/.kodi/addons/packages/metadata.themoviedb.org-5.2.1.zip new file mode 100644 index 000000000..48569b8a5 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.themoviedb.org-5.2.1.zip differ diff --git a/.install/.kodi/addons/packages/metadata.themoviedb.org-5.2.2.zip b/.install/.kodi/addons/packages/metadata.themoviedb.org-5.2.2.zip new file mode 100644 index 000000000..5e159da45 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.themoviedb.org-5.2.2.zip differ diff --git a/.install/.kodi/addons/packages/metadata.themoviedb.org-5.2.4.zip b/.install/.kodi/addons/packages/metadata.themoviedb.org-5.2.4.zip new file mode 100644 index 000000000..1e7ad3253 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.themoviedb.org-5.2.4.zip differ diff --git a/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.10.zip b/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.10.zip new file mode 100644 index 000000000..33fea5a5d Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.10.zip differ diff --git a/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.2.zip 
b/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.2.zip new file mode 100644 index 000000000..2b7c8fb2f Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.2.zip differ diff --git a/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.3.zip b/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.3.zip new file mode 100644 index 000000000..0c3ac4aa0 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.3.zip differ diff --git a/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.4.zip b/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.4.zip new file mode 100644 index 000000000..abf0be6e0 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.4.zip differ diff --git a/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.5.zip b/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.5.zip new file mode 100644 index 000000000..c04123855 Binary files /dev/null and b/.install/.kodi/addons/packages/metadata.tvshows.themoviedb.org-3.5.5.zip differ diff --git a/.install/.kodi/addons/packages/plugin.audio.lastfmtube-1.0.0.zip b/.install/.kodi/addons/packages/plugin.audio.lastfmtube-1.0.0.zip new file mode 100644 index 000000000..bb1bc36a6 Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.audio.lastfmtube-1.0.0.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.couchpotato_manager-0.0.8.zip b/.install/.kodi/addons/packages/plugin.video.couchpotato_manager-0.0.8.zip new file mode 100644 index 000000000..861554dd7 Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.couchpotato_manager-0.0.8.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.d8-2.0.3.zip b/.install/.kodi/addons/packages/plugin.video.d8-2.0.3.zip new file mode 100644 index 000000000..6aeb9a2cf Binary files 
/dev/null and b/.install/.kodi/addons/packages/plugin.video.d8-2.0.3.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.filmsforaction-1.1.1.zip b/.install/.kodi/addons/packages/plugin.video.filmsforaction-1.1.1.zip new file mode 100644 index 000000000..045d5d9ca Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.filmsforaction-1.1.1.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.francetv-1.1.0.zip b/.install/.kodi/addons/packages/plugin.video.francetv-1.1.0.zip new file mode 100644 index 000000000..d83d65fbe Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.francetv-1.1.0.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.vstream-0.7.7.zip b/.install/.kodi/addons/packages/plugin.video.vstream-0.7.7.zip new file mode 100644 index 000000000..58f585a87 Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.vstream-0.7.7.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.vstream-0.7.8.zip b/.install/.kodi/addons/packages/plugin.video.vstream-0.7.8.zip new file mode 100644 index 000000000..912820180 Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.vstream-0.7.8.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.vstream-0.7.9.zip b/.install/.kodi/addons/packages/plugin.video.vstream-0.7.9.zip new file mode 100644 index 000000000..b2fd7b436 Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.vstream-0.7.9.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.vstream-0.8.0.zip b/.install/.kodi/addons/packages/plugin.video.vstream-0.8.0.zip new file mode 100644 index 000000000..737e47a7f Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.vstream-0.8.0.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.vstream-0.8.2.zip b/.install/.kodi/addons/packages/plugin.video.vstream-0.8.2.zip new file mode 100644 index 
000000000..5af4d7a4a Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.vstream-0.8.2.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.youtube-6.7.0.zip b/.install/.kodi/addons/packages/plugin.video.youtube-6.7.0.zip new file mode 100644 index 000000000..35ade2044 Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.youtube-6.7.0.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.youtube-6.8.0.zip b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.0.zip new file mode 100644 index 000000000..51023de51 Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.0.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.youtube-6.8.2.zip b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.2.zip new file mode 100644 index 000000000..051a34a6c Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.2.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.youtube-6.8.3.zip b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.3.zip new file mode 100644 index 000000000..da2735f22 Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.3.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.youtube-6.8.5.zip b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.5.zip new file mode 100644 index 000000000..6b024ca75 Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.5.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.youtube-6.8.6.zip b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.6.zip new file mode 100644 index 000000000..9729e3d19 Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.6.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.youtube-6.8.7.zip b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.7.zip new file mode 100644 index 
000000000..e7c2a1a14 Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.7.zip differ diff --git a/.install/.kodi/addons/packages/plugin.video.youtube-6.8.8.zip b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.8.zip new file mode 100644 index 000000000..b849ae97b Binary files /dev/null and b/.install/.kodi/addons/packages/plugin.video.youtube-6.8.8.zip differ diff --git a/.install/.kodi/addons/packages/script.module.dateutil-2.8.1.zip b/.install/.kodi/addons/packages/script.module.dateutil-2.8.1.zip new file mode 100644 index 000000000..89a7f6601 Binary files /dev/null and b/.install/.kodi/addons/packages/script.module.dateutil-2.8.1.zip differ diff --git a/.install/.kodi/addons/packages/script.module.future-0.17.1.zip b/.install/.kodi/addons/packages/script.module.future-0.17.1.zip new file mode 100644 index 000000000..251833434 Binary files /dev/null and b/.install/.kodi/addons/packages/script.module.future-0.17.1.zip differ diff --git a/.install/.kodi/addons/packages/script.module.inputstreamhelper-0.5.2.zip b/.install/.kodi/addons/packages/script.module.inputstreamhelper-0.5.2.zip new file mode 100644 index 000000000..5d0b2dda9 Binary files /dev/null and b/.install/.kodi/addons/packages/script.module.inputstreamhelper-0.5.2.zip differ diff --git a/.install/.kodi/addons/packages/script.module.parsedom-2.5.2.zip b/.install/.kodi/addons/packages/script.module.parsedom-2.5.2.zip new file mode 100644 index 000000000..17e817588 Binary files /dev/null and b/.install/.kodi/addons/packages/script.module.parsedom-2.5.2.zip differ diff --git a/.install/.kodi/addons/packages/script.module.pylast-1.8.0.zip b/.install/.kodi/addons/packages/script.module.pylast-1.8.0.zip new file mode 100644 index 000000000..565ae50e5 Binary files /dev/null and b/.install/.kodi/addons/packages/script.module.pylast-1.8.0.zip differ diff --git a/.install/.kodi/addons/packages/script.module.simplecache-1.0.18.zip 
b/.install/.kodi/addons/packages/script.module.simplecache-1.0.18.zip new file mode 100644 index 000000000..50c6096f2 Binary files /dev/null and b/.install/.kodi/addons/packages/script.module.simplecache-1.0.18.zip differ diff --git a/.install/.kodi/addons/packages/script.module.simplecache-1.0.20.zip b/.install/.kodi/addons/packages/script.module.simplecache-1.0.20.zip new file mode 100644 index 000000000..e6d0948bb Binary files /dev/null and b/.install/.kodi/addons/packages/script.module.simplecache-1.0.20.zip differ diff --git a/.install/.kodi/addons/packages/script.module.six-1.13.0.zip b/.install/.kodi/addons/packages/script.module.six-1.13.0.zip new file mode 100644 index 000000000..04f68ba65 Binary files /dev/null and b/.install/.kodi/addons/packages/script.module.six-1.13.0.zip differ diff --git a/.install/.kodi/addons/packages/script.module.xbmcswift2-13.0.0.zip b/.install/.kodi/addons/packages/script.module.xbmcswift2-13.0.0.zip new file mode 100644 index 000000000..d15f73708 Binary files /dev/null and b/.install/.kodi/addons/packages/script.module.xbmcswift2-13.0.0.zip differ diff --git a/.install/.kodi/addons/packages/script.module.xbmcswift2-13.0.3.zip b/.install/.kodi/addons/packages/script.module.xbmcswift2-13.0.3.zip new file mode 100644 index 000000000..5bba6af26 Binary files /dev/null and b/.install/.kodi/addons/packages/script.module.xbmcswift2-13.0.3.zip differ diff --git a/.install/.kodi/addons/packages/script.module.xbmcswift2-2.5.1.zip b/.install/.kodi/addons/packages/script.module.xbmcswift2-2.5.1.zip new file mode 100644 index 000000000..74583cfe8 Binary files /dev/null and b/.install/.kodi/addons/packages/script.module.xbmcswift2-2.5.1.zip differ diff --git a/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.10.zip b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.10.zip new file mode 100644 index 000000000..f7abb30d6 Binary files /dev/null and b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.10.zip 
differ diff --git a/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.11.zip b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.11.zip new file mode 100644 index 000000000..367c77ddc Binary files /dev/null and b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.11.zip differ diff --git a/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.4.zip b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.4.zip new file mode 100644 index 000000000..75bfb56d7 Binary files /dev/null and b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.4.zip differ diff --git a/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.5.zip b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.5.zip new file mode 100644 index 000000000..cf16544ae Binary files /dev/null and b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.5.zip differ diff --git a/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.6.zip b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.6.zip new file mode 100644 index 000000000..642ac9c1c Binary files /dev/null and b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.6.zip differ diff --git a/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.8.zip b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.8.zip new file mode 100644 index 000000000..12a11885a Binary files /dev/null and b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.8.zip differ diff --git a/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.9.zip b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.9.zip new file mode 100644 index 000000000..667a96336 Binary files /dev/null and b/.install/.kodi/addons/packages/service.xbmc.versioncheck-0.5.9.zip differ diff --git a/.install/.kodi/addons/plugin.audio.lastfmtube/LICENSE.md b/.install/.kodi/addons/plugin.audio.lastfmtube/LICENSE.md new file mode 100644 index 000000000..733c07236 --- 
/dev/null +++ b/.install/.kodi/addons/plugin.audio.lastfmtube/LICENSE.md @@ -0,0 +1,675 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. 
+ + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. 
+ + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. 
Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. 
+ + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. 
+ + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. 
If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. 
If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. 
+ + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. 
For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + {one line to give the program's name and a brief idea of what it does.} + Copyright (C) {year} {name of author} + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + {project} Copyright (C) {year} {fullname} + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. 
+ diff --git a/.install/.kodi/addons/plugin.audio.lastfmtube/addon.xml b/.install/.kodi/addons/plugin.audio.lastfmtube/addon.xml new file mode 100644 index 000000000..b0de28eb0 --- /dev/null +++ b/.install/.kodi/addons/plugin.audio.lastfmtube/addon.xml @@ -0,0 +1,24 @@ + + + + + + + + + audio + + + Play your favorite music videos powered by YouTube and your Last.FM scrobble data. + The world's largest online music catalogue, powered by your scrobbles and paired with YouTube. + all + GNU GENERAL PUBLIC LICENSE. Version 3, June 2007 + https://github.com/Lunatixz/XBMC_Addons/tree/master/plugin.audio.lastfmtube + https://forum.kodi.tv/showthread.php?tid=315688 + + + icon.png + fanart.jpg + + + \ No newline at end of file diff --git a/.install/.kodi/addons/plugin.audio.lastfmtube/default.py b/.install/.kodi/addons/plugin.audio.lastfmtube/default.py new file mode 100644 index 000000000..31912a3f5 --- /dev/null +++ b/.install/.kodi/addons/plugin.audio.lastfmtube/default.py @@ -0,0 +1,359 @@ +# Copyright (C) 2016 Lunatixz +# +# +# This file is part of LastFM Tube. +# +# LastFM Tube is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# LastFM Tube is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with LastFM Tube. If not, see . 
+# -*- coding: utf-8 -*- +import os, sys, time, datetime, pylast, re +import urllib, socket, json, collections, random +import xbmc, xbmcgui, xbmcplugin, xbmcvfs, xbmcaddon + +from simplecache import use_cache, SimpleCache + +# Plugin Info +ADDON_ID = 'plugin.audio.lastfmtube' +REAL_SETTINGS = xbmcaddon.Addon(id=ADDON_ID) +ADDON_NAME = REAL_SETTINGS.getAddonInfo('name') +SETTINGS_LOC = REAL_SETTINGS.getAddonInfo('profile').decode('utf-8') +ADDON_PATH = REAL_SETTINGS.getAddonInfo('path').decode('utf-8') +ADDON_VERSION = REAL_SETTINGS.getAddonInfo('version') +ICON = REAL_SETTINGS.getAddonInfo('icon') +FANART = REAL_SETTINGS.getAddonInfo('fanart') + +## GLOBALS ## +TIMEOUT = 15 +USER1 = REAL_SETTINGS.getSetting('USER1') +PASS1 = REAL_SETTINGS.getSetting('PASS1') +USER2 = REAL_SETTINGS.getSetting('USER2').replace('Enter Username','') +PASS2 = REAL_SETTINGS.getSetting('PASS2').replace('Enter Password','') +MEDIA_LIMIT= [25,50,100,250][int(REAL_SETTINGS.getSetting('MEDIA_LIMIT'))] +RANDOM_PLAY= REAL_SETTINGS.getSetting('RANDOM_PLAY') == "true" +YTURL = 'plugin://plugin.video.youtube/play/?video_id=' +YSSEARCH = 'plugin://plugin.video.youtube/kodion/search/query/?q=%s' +DEBUG = REAL_SETTINGS.getSetting('Enable_Debugging') == 'true' +API_KEY = REAL_SETTINGS.getSetting('LASTFM_APIKEY') +API_SECRET = REAL_SETTINGS.getSetting('LASTFM_APISECRET') + +USERLST = [] +if len(USER1) > 0: + USERLST.append((USER1, '', 0, USER1,pylast.md5(PASS1))) +if len(USER2) > 0: + USERLST.append((USER2, '', 0, USER2,pylast.md5(PASS2))) + +MENULST = (('Top Tracks' , '', 3), + ('Loved Tracks' , '', 4), + ('Recently Played', '', 2)) + +def log(msg, level = xbmc.LOGDEBUG): + if DEBUG == True: + xbmc.log(ADDON_ID + '-' + ADDON_VERSION + '-' + stringify(msg), level) + +def convertString(string): + try: + string = unicode(string, "ascii") + except UnicodeError: + string = unicode(string, "utf-8") + else: + string = stringify(string) + return string + +def uni(string, encoding='utf-8'): + if 
isinstance(string, basestring): + if not isinstance(string, unicode): + string = unicode(string, encoding) + return string + +def ascii(string): + if isinstance(string, basestring): + if isinstance(string, unicode): + string = string.encode('ascii', 'ignore') + return string + +def utf(string): + if isinstance(string, basestring): + if not isinstance(string, unicode): + string = string.encode('utf-8', 'ignore') + return string + +def encodeString(string): + return ''.join(i for i in string.encode('utf8') if ord(i)<128) + +def stringify(string): + if isinstance(string, list): + string = stringify(string[0]) + elif isinstance(string, (int, float, long, complex, bool)): + string = str(string) + if isinstance(string, basestring): + if not isinstance(string, unicode): + string = unicode(string, 'utf-8') + elif isinstance(string, unicode): + string = string.encode('ascii', 'ignore') + else: + string = string.encode('utf-8', 'ignore') + return string + +def getParams(): + param=[] + if len(sys.argv[2])>=2: + params=sys.argv[2] + cleanedparams=params.replace('?','') + if (params[len(params)-1]=='/'): + params=params[0:len(params)-2] + pairsofparams=cleanedparams.split('&') + param={} + for i in range(len(pairsofparams)): + splitparams={} + splitparams=pairsofparams[i].split('=') + if (len(splitparams))==2: + param[splitparams[0]]=splitparams[1] + return param + +socket.setdefaulttimeout(TIMEOUT) +class LastFMTube(): + def __init__(self): + self.cache = SimpleCache() + + + def mainMenu(self): + for item in USERLST: + self.addDir(*item) + + + def browseMenu(self, user, pwd): + for item in MENULST: + print item + self.addDir(*item,**{'user':user,'pwd':pwd}) + + + def sendJSON(self, command): + data = '' + try: + data = xbmc.executeJSONRPC(uni(command)) + except UnicodeEncodeError: + data = xbmc.executeJSONRPC(ascii(command)) + return data + + + def loadJson(self, string): + if len(string) == 0: + return {} + try: + return json.loads(uni(string)) + except Exception,e: + return 
{} + + + def escapeDirJSON(self, dir_name): + mydir = uni(dir_name) + if (mydir.find(":")): + mydir = mydir.replace("\\", "\\\\") + return mydir + + + @use_cache(31) + def getDirectory(self, path, media='video', ignore='false', method='random', order='ascending', end=0, start=0, filter={}): + json_query = ('{"jsonrpc":"2.0","method":"Files.GetDirectory","params":{"directory":"%s","properties":["thumbnail","fanart","plot","duration","playcount"],"media":"%s","sort":{"ignorearticle":%s,"method":"%s","order":"%s"},"limits":{"end":%s,"start":%s}},"id":1}' % (self.escapeDirJSON(path), media, ignore, method, order, end, start)) + json_response = self.sendJSON(json_query) + return self.loadJson(json_response) + + + def buildMenu(self, url, auto=False): + log('buildMenu, url = ' + url) + json_response = self.getDirectory(url) + if 'result' in json_response and(json_response['result'] != None) and 'files' in json_response['result']: + response = json_response['result']['files'] + response = [response[random.randint(0,len(response)-1)]] if auto == True else response + for item in response: + label = encodeString(item.get('label','')) + + if (item.get('filetype','') or '') == 'file': + url = item.get('file','') + infoLabels ={"label":label ,"title":label ,"plot":item.get('plot',''), "duration":(item.get('duration','') or 0), "playcount":(item.get('playcount','') or 0)} + infoArt ={"thumb":(item.get('thumbnail','') or ICON),"poster":(item.get('thumbnail','') or ICON),"fanart":(item.get('fanart','') or FANART)} + self.addLink(label, url, 9, infoList=infoLabels, infoArt=infoArt) + + + def getRecentTracks(self, user, pwd, auto=False, rand=False, limit=250): + """ + Get list of recently played tracks + """ + playList = [] + playCount = 0 + log('getRecentTracks, user = ' + user) + network = pylast.LastFMNetwork(api_key=API_KEY, api_secret=API_SECRET, username=user, password_hash=pwd) + if auto == False: + self.addDir('[B]Create Playlist (%d)[/B]'%MEDIA_LIMIT, '5', 5, user, pwd) + 
user = network.get_user(network.username) + for track in user.get_recent_tracks(limit=limit): + artist = track.track.get_artist().name + title = track.track.get_title() + artist = encodeString(artist) + title = encodeString(title) + name = ('{0!s} - {1!s}'.format(artist, title)) + url = YSSEARCH%(name.replace(' ','%20')) + if auto == True: + if rand == True and random.choice([True,False]) == True: + continue + self.buildMenu(url, True) + playCount += 1 + if playCount >= MEDIA_LIMIT: + break + else: + self.addDir(name, url, 1) + + + def getLovedTracks(self, user, pwd, auto=False, rand=False, limit=250): + """Returns this user's loved track""" + playList = [] + playCount= 0 + log('getPlaylists, user = ' + user) + network = pylast.LastFMNetwork(api_key=API_KEY, api_secret=API_SECRET, username=user, password_hash=pwd) + if auto == False: + self.addDir('[B]Create Playlist (%d)[/B]'%MEDIA_LIMIT, '7', 7, user, pwd) + user = network.get_user(network.username) + for loved in user.get_loved_tracks(limit=limit): + artist = loved.track.get_artist().name + title = loved.track.get_title() + artist = encodeString(artist) + title = encodeString(title) + name = ('{0!s} - {1!s}'.format(artist, title)) + url = YSSEARCH%(name.replace(' ','%20')) + if auto == True: + #lazy method, won't result in MEDIA_LIMIT + if rand == True and random.choice([True,False]) == True: + continue + self.buildMenu(url, True) + playCount += 1 + if playCount >= MEDIA_LIMIT: + break + else: + self.addDir(name, url, 1) + + + def getTopTracks(self, user, pwd, auto=False, rand=False): + """Returns the most played tracks as a sequence of TopItem objects.""" + playList = [] + playCount= 0 + log('getTopTracks, user = ' + user) + network = pylast.LastFMNetwork(api_key=API_KEY, api_secret=API_SECRET, username=user, password_hash=pwd) + if auto == False: + self.addDir('[B]Create Playlist (%d)[/B]'%MEDIA_LIMIT, '6', 6, user, pwd) + user = network.get_user(network.username) + for top in user.get_top_tracks() : + name = 
encodeString(str(top.item)) + url = YSSEARCH%(name.replace(' ','%20')) + if auto == True: + #lazy method, won't result in MEDIA_LIMIT + if rand == True and random.choice([True,False]) == True: + continue + self.buildMenu(url, True) + playCount += 1 + if playCount >= MEDIA_LIMIT: + break + else: + self.addDir(name, url, 1) + + + def resolveURL(self, url): + log('resolveURL, url = ' + url) + if len(re.findall('http[s]?://www.youtube.com/watch', url)) > 0: + return YTURL + url.split('/watch?v=')[1] + elif len(re.findall('http[s]?://youtu.be/', url)) > 0: + return YTURL + url.split('/youtu.be/')[1] + return url + + + def playVideo(self, name, url): + log('playVideo') + liz=xbmcgui.ListItem(name, path=url) + liz.setProperty("IsPlayable","true") + xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, liz) + + + def addLink(self, name, u, mode, user='', pwd='', infoList=False, infoArt=False, total=0): + log('addLink, name = ' + name) + liz=xbmcgui.ListItem(name) + liz.setProperty('IsPlayable', 'true') + if infoList == False: + liz.setInfo( type="Video", infoLabels={"label":name,"title":name} ) + else: + liz.setInfo(type="Video", infoLabels=infoList) + + if infoArt == False: + liz.setArt({'thumb':ICON,'fanart':FANART}) + else: + liz.setArt(infoArt) + u=sys.argv[0]+"?url="+urllib.quote_plus(u)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&user="+urllib.quote_plus(user)+"&pwd="+str(pwd) + xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,totalItems=total) + + + def addDir(self, name, u, mode, user='', pwd='', infoList=False, infoArt=False): + log('addDir, name = ' + name) + liz=xbmcgui.ListItem(name) + liz.setProperty('IsPlayable', 'false') + if infoList == False: + liz.setInfo(type="Video", infoLabels={"label":name,"title":name}) + else: + liz.setInfo(type="Video", infoLabels=infoList) + if infoArt == False: + liz.setArt({'thumb':ICON,'fanart':FANART}) + else: + liz.setArt(infoArt) + 
u=sys.argv[0]+"?url="+urllib.quote_plus(u)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&user="+urllib.quote_plus(user)+"&pwd="+str(pwd) + xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True) + + +params=getParams() +try: + url=urllib.unquote_plus(params["url"]) +except: + url=None +try: + name=urllib.unquote_plus(params["name"]) +except: + name=None +try: + user=urllib.unquote_plus(params["user"]) +except: + user=None +try: + pwd=urllib.unquote_plus(params["pwd"]) +except: + pwd=None +try: + mode=int(params["mode"]) +except: + mode=None + +log("Mode: "+str(mode)) +log("URL : "+str(url)) +log("Name: "+str(name)) +log("User: "+str(user)) +log("PWD : "+str(pwd)) + +if mode==None: LastFMTube().mainMenu() +elif mode == 0: LastFMTube().browseMenu(user, pwd) +elif mode == 1: LastFMTube().buildMenu(url) +elif mode == 2: LastFMTube().getRecentTracks(user, pwd) +elif mode == 3: LastFMTube().getTopTracks(user, pwd) +elif mode == 4: LastFMTube().getLovedTracks(user, pwd) +elif mode == 5: LastFMTube().getRecentTracks(user, pwd, True, RANDOM_PLAY) +elif mode == 6: LastFMTube().getTopTracks(user, pwd, True, RANDOM_PLAY) +elif mode == 7: LastFMTube().getLovedTracks(user, pwd, True, RANDOM_PLAY) +elif mode == 9: LastFMTube().playVideo(name, url) + +xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_NONE ) +xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_LABEL ) +xbmcplugin.endOfDirectory(int(sys.argv[1]),cacheToDisc=True) \ No newline at end of file diff --git a/.install/.kodi/addons/plugin.audio.lastfmtube/fanart.jpg b/.install/.kodi/addons/plugin.audio.lastfmtube/fanart.jpg new file mode 100644 index 000000000..1cd3d6f7c Binary files /dev/null and b/.install/.kodi/addons/plugin.audio.lastfmtube/fanart.jpg differ diff --git a/.install/.kodi/addons/plugin.audio.lastfmtube/icon.png b/.install/.kodi/addons/plugin.audio.lastfmtube/icon.png new file mode 100644 index 000000000..2b2b6c7af Binary files /dev/null 
and b/.install/.kodi/addons/plugin.audio.lastfmtube/icon.png differ diff --git a/.install/.kodi/addons/plugin.audio.lastfmtube/resources/language/resource.language.en_gb/strings.po b/.install/.kodi/addons/plugin.audio.lastfmtube/resources/language/resource.language.en_gb/strings.po new file mode 100644 index 000000000..9a80a4c06 --- /dev/null +++ b/.install/.kodi/addons/plugin.audio.lastfmtube/resources/language/resource.language.en_gb/strings.po @@ -0,0 +1,57 @@ +# Kodi Media Center language file +# Addon Name: LastFM Tube +# Addon id: plugin.audio.lastfmtube +# Addon Provider: Lunatixz +msgid "" +msgstr "" +"Project-Id-Version: plugin.audio.lastfmtube\n" +"Report-Msgid-Bugs-To: http://forum.kodi.tv/showthread.php?tid=315513\n" +"POT-Creation-Date: YEAR-MO-DA HO:MI+ZONE\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Lunatixz Translation Team\n" +"Language-Team: English (http://www.transifex.com/projects/p/xbmc-addons/language/en/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: en\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgctxt "#30000" +msgid "Enable Debugging [Log errors]" +msgstr "" + +msgctxt "#30001" +msgid "Username 1" +msgstr "" + +msgctxt "#30002" +msgid "Password" +msgstr "" + +msgctxt "#30003" +msgid "Username 2" +msgstr "" + +msgctxt "#30004" +msgid "Password" +msgstr "" + +msgctxt "#30005" +msgid "Playlist Media Limit" +msgstr "" + +msgctxt "#30006" +msgid "Randomize Playlist" +msgstr "" + +msgctxt "#30007" +msgid "Enter API-Key" +msgstr "" + +msgctxt "#30008" +msgid "Enter API-Secret Key" +msgstr "" + +msgctxt "#30009" +msgid "Use Personal Last.FM API KEY (recommended)" +msgstr "" diff --git a/.install/.kodi/addons/plugin.audio.lastfmtube/resources/settings.xml b/.install/.kodi/addons/plugin.audio.lastfmtube/resources/settings.xml new file mode 100644 index 000000000..cd75efcde --- /dev/null +++ 
b/.install/.kodi/addons/plugin.audio.lastfmtube/resources/settings.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/LICENSE.txt b/.install/.kodi/addons/plugin.video.filmsforaction/LICENSE.txt new file mode 100644 index 000000000..d159169d1 --- /dev/null +++ b/.install/.kodi/addons/plugin.video.filmsforaction/LICENSE.txt @@ -0,0 +1,339 @@ + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. 
+ + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) 
Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. 
+ + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. 
You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. 
+ +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. 
If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. 
If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. 
+ + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. 
diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/README.md b/.install/.kodi/addons/plugin.video.filmsforaction/README.md new file mode 100644 index 000000000..eb833435d --- /dev/null +++ b/.install/.kodi/addons/plugin.video.filmsforaction/README.md @@ -0,0 +1,46 @@ +Films For Action Videos: XBMC video add-on to watch the Films For Action videos from the FFA web site. + + Copyright (C) 2014 Jose Antonio Montes (jamontes) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + + +Initial release 1.0.0 + +[![Build Status](https://secure.travis-ci.org/jamontes/plugin.video.filmsforaction.png)](http://travis-ci.org/jamontes/plugin.video.filmsforaction) + +This XBMC plugin allows to watch the videos from the Films For Action web site (more than 2500 videos available). + +<< Films For Action is a community-powered learning library and alternative news center for people who want to change the world. + +At an International Level: +Films For Action uses the power of film to raise awareness of important social, environmental, and media-related issues not covered by the mainstream news. Our goal is to provide citizens with the information and perspectives essential to creating a more just, sustainable, and democratic society. + +At the Local Level: +On the ground, our City Chapters are working to create alternative media channels that will inform, connect, and inspire action at a community level. 
+ +Our city chapters screen documentaries at independent theaters and other venues regularly throughout the year. With most films we launch an accompanying educational and action-oriented campaign to address the issues presented by the films. Some of our chapters air films on their local public Access TV channel. And all the films we buy we make available for people to borrow from us for free, either to watch themselves or to screen in their own neighborhoods. + +Our local city chapter sub-sites offer several tools to connect and inform people through our website, including a calendar of local activist events, a directory of local progressive and radical groups, and a blog for writers to contribute local news and perspectives. + +All in all, through the screenings, public access TV, this website, and our Lending Library, our City Chapters aim to provide an information and resource network that will reduce its city's dependence on corporate media, providing more meaningful and reliable ways to stay informed on the issues that matter. >> (citation: https://www.filmsforaction.org/about/) + +To install it simply download the zip file, and then proceed as with any other plugin from XBMC: + System->Settings->Add-ons->Install from zip file. + +Any issues detected can be reported using this forum thread for official support: https://forum.kodi.tv/showthread.php?tid=198524 + +Enjoy it! + +jamontes diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/addon.xml b/.install/.kodi/addons/plugin.video.filmsforaction/addon.xml new file mode 100644 index 000000000..722fb04f9 --- /dev/null +++ b/.install/.kodi/addons/plugin.video.filmsforaction/addon.xml @@ -0,0 +1,26 @@ + + + + + + + + video + + + all + Films For Action videos + Films For Action is a community-powered learning library and alternative news center for people who want to change the world. 
It uses the power of film to raise awareness of important social, environmental, and media-related issues not covered by the mainstream news. This Add-on allows you to watch most of its videos.[CR]More than 2,500 videos are available. + Films For Action vídeos + Films For Action usa el poder de los vídeos para llamar la atención sobre la importancia de los problemas sociales, medioambientales, e informativos no cubiertos por los principales medios de comunicación. Films For Action ofrece una fuente alternativa de noticias y aprendizaje mantenida por la comunidad para la gente que quiere cambiar el mundo. Este Add-on permite ver los vídeos de Films For Action mostrados en su página web.[CR]Más de 2500 vídeos están disponibles. + Films For Action médiathèque + Cette plugin permetre de regarder le Films For Action médiathèque qui se trouve sur leur site web. + Films For Action videos + Dieses plugin erlaubt es, die videos aus dem Films For Action website zu sehen. + en + GPL-3.0-or-later + https://forum.kodi.tv/showthread.php?tid=198524 + https://github.com/jamontes/plugin.video.filmsforaction + https://www.filmsforaction.org/ + + diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/changelog.txt b/.install/.kodi/addons/plugin.video.filmsforaction/changelog.txt new file mode 100644 index 000000000..ff865e81f --- /dev/null +++ b/.install/.kodi/addons/plugin.video.filmsforaction/changelog.txt @@ -0,0 +1,76 @@ +1.1.1 (2020.05.06) +- Quick fix due to website changes. +1.1.0 (2019.01.27) +- Updated add-on for Isengard+ due to missing add-on dependency. +1.0.10 (2018.09.05) +- Removed add-on dependencies. +- Deprecated Gotham release due to missing add-on dependency. +- Modified to allow upgrades from Helix release onwards. +- Minor changes and refactoring. +1.0.9 (2018.03.30) +- Minor fixes due to website changes. +- Added sort by most viewed/recent option from add-on settings menu. +1.0.8 (2017.03.13) +- Fixed video scraper due to Dailymotion website changes. 
+- Fixed category menus due to website changes. +- Fixed search option due to website changes. +- Fixed Next/Previous Page entries due to website changes. +- Improved Vimeo scraper to support non standard video resolutions. +- Updated logging system. +- Updated lutils library to latest version. +- Updated category test case. +- Improved video patterns. +- Code refactored and cleaned. +1.0.7 (2016.04.05) +- Update Vimeo scraper due to Vimeo website changes. +1.0.6 (2015.08.11) +- Quick fix on scraper due to Dailymotion website changes. +- Fixed Next/Previous Page entries due to website changes. +1.0.5 +- Added filter to show All/Best videos from add-on setings menu. +- Included category menu into the Next/Previous Page title entries. +- Fixed last page regexp for Next Page entry to allow explore beyond nine. +1.0.4 +- Fixed Next/Previous Page entries due to website changes. +- Updated Dailymotion scraper to support more video formats. +1.0.3 +- Rewriten video list parser to make it more tolerant to website changes. +- Replaced builtin Disclose TV parser on behalf of Disclose TV add-on (credits: sphere) +1.0.2 +- Added Travis-CI integration support for scraper API testing. +- Added support for Disclose TV videos. +- Modified video list parser due to website changes. +- Improved Search option for retrieve all kind of videos. +1.0.1 +- Added mail tag into addon.xml file. +- Added settings.xml encoding line. +- Modified add-on logging system according to recommendations. +- Modified categories parser due to website changes. +1.0.0 +- Fixed Vimeo custom scraper due to API change. +- Modified video list parser due to website changes. +- Added support for SnagFilms videos thanks to the SnagFilms add-on (credits: t1m). +- Updated version number for official repo bump. +0.1.0 +- Remove the Vimeo add-on dependency due to a problem found running on Gotham. +- Made custom Vimeo scraper to support the Vimeo videos on Gotham. +- Added forum thread to addon and README files. 
+0.0.9 +- Clean up code and documentation. +- prepare everything for first bump into git repo. +0.0.8 +- Added fanart and icon. +0.0.7 +- Added Search option for videos. +0.0.6 +- Added internationalization support. +0.0.5 +- Added support for Dailymotion, archive.org and kickstarter videos. +0.0.4 +- Updated parser for page navigation. +0.0.3 +- Suported videos for Youtube and Vimeo. +0.0.2 +- added suport for info tags in videos. +0.0.1 +- First Try. diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/default.py b/.install/.kodi/addons/plugin.video.filmsforaction/default.py new file mode 100644 index 000000000..9a15c4f3d --- /dev/null +++ b/.install/.kodi/addons/plugin.video.filmsforaction/default.py @@ -0,0 +1,176 @@ +# -*- coding: utf-8 -*- + +''' + KODI Films For Action video add-on. + Copyright (C) 2014 José Antonio Montes (jamontes) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + + This is the first trial of the Films For Action video add-on for KODI. + This add-on gets the videos from Films For Action web site and shows them properly ordered. + You can choose the preferred language for the videos, if it is available. + This plugin depends on the lutil library functions. 
+''' + +from resources.lib.plugin import Plugin +import resources.lib.ffa_api as api + +plugin_id = 'plugin.video.filmsforaction' + +localized_strings = { + 'Next page': 30010, + 'Type not suported': 30011, + 'Video not located': 30012, + 'Previous page': 30013, + 'All videos': 30014, + 'Search': 30015, + } + +p = Plugin(plugin_id) + +settings = p.get_plugin_settings() +translation = p.get_plugin_translation() + +debug_flag = settings.getSetting("debug") == "true" + +p.set_debug_mode(debug_flag) +api.set_debug(debug_flag, p.log) + +# By default, both, the website and the add-on are setup to show only the best videos. +all_filter = '&quality=all' if settings.getSetting("show_best") == "false" else '&quality=best' +sort_param = '&sort=new' if settings.getSetting("views") == "false" else '&sort=views' + +if all_filter == '&quality=all': + p.log("ffa.main: 'all videos' filter is explicit setup.") + +if sort_param == '&sort=views': + p.log("ffa.main: sorted by views is explicit setup.") + +def get_located_string(string_name): + return translation(localized_strings.get(string_name)).encode('utf-8') or string_name if string_name in localized_strings else string_name + + +# Entry point +def run(): + p.log("ffa.run") + + # Get params + params = p.get_plugin_parms() + + action = params.get("action", '') + if action: + eval("%s(params)" % action) + else: + create_index(params) + + +# Main menu +def create_index(params): + p.log("ffa.create_index "+repr(params)) + + action = 'main_list' + + category_list = api.get_categories() + + menu_entry = get_located_string('All videos') + all_videos_item = { + 'thumbnail': '', + 'info': { + 'title': menu_entry, + 'genre': menu_entry, + }, + 'path': p.get_plugin_path(url = 'https://www.filmsforaction.org/library/?category=all+videos' + all_filter + sort_param, action = action, category = menu_entry), + 'IsPlayable' : False + } + + menu_entry = get_located_string('Search') + search_videos_item = { + 'thumbnail': '', + 'info': { + 'title': 
menu_entry, + 'genre': menu_entry, + }, + 'path': p.get_plugin_path(action = 'search_videos', category = menu_entry), + 'IsPlayable' : False + } + + categories = [ { + 'thumbnail': '', + 'info': { + 'title': category_title, + 'genre': category_title + }, + 'path': p.get_plugin_path(url = category_url + all_filter + sort_param, action = action, category = category_title), + 'IsPlayable' : False + } for category_url, category_title in category_list] + + categories.insert(0, all_videos_item) + categories.insert(0, search_videos_item) + p.add_items(categories) + + +def main_list(params): + p.log("ffa.main_list "+repr(params)) + + category = params.get("category", "") + videos = api.get_videolist(params.get("url"), category) + reset_cache = 'yes' if params.get('reset_cache') == 'yes' or videos['reset_cache'] else 'no' + + video_list = [ { + 'thumbnail' : video_entry.get('thumbnail') or '', + 'info': { + 'title' : video_entry.get('title'), + 'plot' : video_entry.get('plot') or '', + 'studio' : video_entry.get('credits') or '', + 'genre' : video_entry.get('genre') or '', + 'rating' : video_entry.get('rating') or '', + 'duration': video_entry.get('duration') or 1, + }, + 'path' : p.get_plugin_path( + url = video_entry['url'], + action = 'play_video', + ) if video_entry['IsPlayable'] else p.get_plugin_path( + url = video_entry['url'], + action = 'main_list', + reset_cache = reset_cache, + category = category, + ), + 'IsPlayable' : video_entry['IsPlayable'] + } for video_entry in videos['video_list']] + + p.add_items(video_list, reset_cache == 'yes') + + +def search_videos(params): + p.log("ffa.search_video "+repr(params)) + + search_string = p.get_keyboard_text(get_located_string('Search')) + if search_string: + params['url'] = api.get_search_url(search_string) + all_filter + sort_param + p.log("ffa.search Value of search url: %s" % params['url']) + return main_list(params) + + +def play_video(params): + p.log("ffa.play_video "+repr(params)) + + url = 
api.get_playable_url(params.get("url")) + + if url: + return p.play_resolved_url(url) + else: + p.showWarning(get_located_string('Type not suported')) + + +run() diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/fanart.jpg b/.install/.kodi/addons/plugin.video.filmsforaction/fanart.jpg new file mode 100644 index 000000000..46ecc9bc8 Binary files /dev/null and b/.install/.kodi/addons/plugin.video.filmsforaction/fanart.jpg differ diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/icon.png b/.install/.kodi/addons/plugin.video.filmsforaction/icon.png new file mode 100644 index 000000000..5273da8f3 Binary files /dev/null and b/.install/.kodi/addons/plugin.video.filmsforaction/icon.png differ diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/icon_and_fanart_credits.txt b/.install/.kodi/addons/plugin.video.filmsforaction/icon_and_fanart_credits.txt new file mode 100644 index 000000000..b4f3b921b --- /dev/null +++ b/.install/.kodi/addons/plugin.video.filmsforaction/icon_and_fanart_credits.txt @@ -0,0 +1,5 @@ +icon.png +source: https://myspace.com/filmsforaction + +fanart.jpg +source: https://www.facebook.com/filmsforaction/ diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/__init__.py b/.install/.kodi/addons/plugin.video.filmsforaction/resources/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/__init__.pyo b/.install/.kodi/addons/plugin.video.filmsforaction/resources/__init__.pyo new file mode 100644 index 000000000..87bbf183b Binary files /dev/null and b/.install/.kodi/addons/plugin.video.filmsforaction/resources/__init__.pyo differ diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/language/resource.language.de_de/strings.po b/.install/.kodi/addons/plugin.video.filmsforaction/resources/language/resource.language.de_de/strings.po new file mode 100644 index 000000000..57dc7b470 --- /dev/null +++ 
b/.install/.kodi/addons/plugin.video.filmsforaction/resources/language/resource.language.de_de/strings.po @@ -0,0 +1,55 @@ +# Kodi Media Center language file +# Addon Name: Films For Action +# Addon id: plugin.video.filmsforaction +# Addon Provider: Jose Antonio Montes (jamontes) +msgid "" +msgstr "" +"Project-Id-Version: XBMC Addons\n" +"Report-Msgid-Bugs-To: alanwww1@xbmc.org\n" +"POT-Creation-Date: YEAR-MO-DA HO:MI+ZONE\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Jose Antonio Montes (jamontes)\n" +"Language-Team: German (http://www.transifex.com/projects/p/xbmc-addons/language/de/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: de_DE\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgctxt "#30010" +msgid "Next page" +msgstr "Nachste seite" + +msgctxt "#30011" +msgid "Video type not supported" +msgstr "Video-Typ nicht unterstutzt" + +msgctxt "#30012" +msgid "Couldn't locate video url" +msgstr "Konnte Video-URL nicht finden" + +msgctxt "#30013" +msgid "Previous page" +msgstr "Vorherige seite" + +msgctxt "#30014" +msgid "All videos" +msgstr "Alle videos" + +msgctxt "#30015" +msgid "Search" +msgstr "Suchen" + +# Empty strings from id 30016 to 30100 + +msgctxt "#30101" +msgid "Debug (enable logs)" +msgstr "Debug (logs)" + +msgctxt "#30102" +msgid "Show only the best videos (default)" +msgstr "Nur die besten videos anzeigen" + +msgctxt "#30103" +msgid "Sort by most viewed" +msgstr "Sortieren nach Popularität" diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/language/resource.language.en_gb/strings.po b/.install/.kodi/addons/plugin.video.filmsforaction/resources/language/resource.language.en_gb/strings.po new file mode 100644 index 000000000..a210304a3 --- /dev/null +++ b/.install/.kodi/addons/plugin.video.filmsforaction/resources/language/resource.language.en_gb/strings.po @@ -0,0 +1,55 @@ +# Kodi Media Center language file +# Addon Name: Films 
For Action +# Addon id: plugin.video.filmsforaction +# Addon Provider: Jose Antonio Montes (jamontes) +msgid "" +msgstr "" +"Project-Id-Version: XBMC Addons\n" +"Report-Msgid-Bugs-To: alanwww1@xbmc.org\n" +"POT-Creation-Date: YEAR-MO-DA HO:MI+ZONE\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Jose Antonio Montes (jamontes)\n" +"Language-Team: English (http://www.transifex.com/projects/p/xbmc-addons/language/en/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: en_GB\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgctxt "#30010" +msgid "Next page" +msgstr "" + +msgctxt "#30011" +msgid "Video type not supported" +msgstr "" + +msgctxt "#30012" +msgid "Couldn't locate video url" +msgstr "" + +msgctxt "#30013" +msgid "Previous page" +msgstr "" + +msgctxt "#30014" +msgid "All videos" +msgstr "" + +msgctxt "#30015" +msgid "Search" +msgstr "" + +# Empty strings from id 30016 to 30100 + +msgctxt "#30101" +msgid "Debug (enable logs)" +msgstr "" + +msgctxt "#30102" +msgid "Show only the best videos (default)" +msgstr "" + +msgctxt "#30103" +msgid "Sort by most viewed" +msgstr "" diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/language/resource.language.es_es/strings.po b/.install/.kodi/addons/plugin.video.filmsforaction/resources/language/resource.language.es_es/strings.po new file mode 100644 index 000000000..1090750f0 --- /dev/null +++ b/.install/.kodi/addons/plugin.video.filmsforaction/resources/language/resource.language.es_es/strings.po @@ -0,0 +1,55 @@ +# Kodi Media Center language file +# Addon Name: Films For Action +# Addon id: plugin.video.filmsforaction +# Addon Provider: Jose Antonio Montes (jamontes) +msgid "" +msgstr "" +"Project-Id-Version: XBMC Addons\n" +"Report-Msgid-Bugs-To: alanwww1@xbmc.org\n" +"POT-Creation-Date: YEAR-MO-DA HO:MI+ZONE\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Jose Antonio Montes 
(jamontes)\n" +"Language-Team: Spanish (http://www.transifex.com/projects/p/xbmc-addons/language/es/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: es_ES\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgctxt "#30010" +msgid "Next page" +msgstr "Siguiente pagina" + +msgctxt "#30011" +msgid "Video type not supported" +msgstr "Tipo de video no soportado" + +msgctxt "#30012" +msgid "Couldn't locate video url" +msgstr "No se ha podido localizar la url de video" + +msgctxt "#30013" +msgid "Previous page" +msgstr "Pagina anterior" + +msgctxt "#30014" +msgid "All videos" +msgstr "Todos los videos" + +msgctxt "#30015" +msgid "Search" +msgstr "Buscar" + +# Empty strings from id 30016 to 30100 + +msgctxt "#30101" +msgid "Debug (enable logs)" +msgstr "Debug (activa los logs)" + +msgctxt "#30102" +msgid "Show only the best videos (default)" +msgstr "Mostrar sólo los mejores vídeos (por defecto)" + +msgctxt "#30103" +msgid "Sort by most viewed" +msgstr "Ordenar por los más vistos" diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/language/resource.language.fr_fr/strings.po b/.install/.kodi/addons/plugin.video.filmsforaction/resources/language/resource.language.fr_fr/strings.po new file mode 100644 index 000000000..dbc740c8f --- /dev/null +++ b/.install/.kodi/addons/plugin.video.filmsforaction/resources/language/resource.language.fr_fr/strings.po @@ -0,0 +1,55 @@ +# Kodi Media Center language file +# Addon Name: Films For Action +# Addon id: plugin.video.filmsforaction +# Addon Provider: Jose Antonio Montes (jamontes) +msgid "" +msgstr "" +"Project-Id-Version: XBMC Addons\n" +"Report-Msgid-Bugs-To: alanwww1@xbmc.org\n" +"POT-Creation-Date: YEAR-MO-DA HO:MI+ZONE\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Jose Antonio Montes (jamontes)\n" +"Language-Team: French (http://www.transifex.com/projects/p/xbmc-addons/language/fr/)\n" +"MIME-Version: 1.0\n" 
+"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: fr_FR\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgctxt "#30010" +msgid "Next page" +msgstr "Page suivant" + +msgctxt "#30011" +msgid "Video type not supported" +msgstr "Type de video non supporte" + +msgctxt "#30012" +msgid "Couldn't locate video url" +msgstr "Adresse video non trouvee" + +msgctxt "#30013" +msgid "Previous page" +msgstr "Page precedente" + +msgctxt "#30014" +msgid "All videos" +msgstr "Toutes les videos" + +msgctxt "#30015" +msgid "Search" +msgstr "Recherchez" + +# Empty strings from id 30016 to 30100 + +msgctxt "#30101" +msgid "Debug (enable logs)" +msgstr "Debug (logs)" + +msgctxt "#30102" +msgid "Show only the best videos (default)" +msgstr "Afficher uniquement les meilleures vidéos (par défaut)" + +msgctxt "#30103" +msgid "Sort by most viewed" +msgstr "Classement par popularité" diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/__init__.py b/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/__init__.pyo b/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/__init__.pyo new file mode 100644 index 000000000..5ba31a65c Binary files /dev/null and b/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/__init__.pyo differ diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/ffa_api.py b/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/ffa_api.py new file mode 100644 index 000000000..2a40dee82 --- /dev/null +++ b/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/ffa_api.py @@ -0,0 +1,224 @@ +# _*_ coding: utf-8 _*_ + +''' + Films For Action API lib: library functions for Films For Action add-on. 
+ Copyright (C) 2014 José Antonio Montes (jamontes) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + + Description: + These funtions are called from the main plugin module, aimed to ease + and simplify the add-on development process. + Release 0.1.6 +''' + +import lutil as l + +root_url = 'https://www.filmsforaction.org' + +def set_debug(debug_flag, func_log=l.local_log): + """This function is a wrapper to setup the debug flag into the lutil module""" + l.set_debug_mode(debug_flag, func_log) + + +def get_categories(): + """This function gets the categories list from the FFA website.""" + category_pattern = "'topic', '([0-9]+)'[^>]+?>([^<]+?)" + category_url = 'https://www.filmsforaction.org/library/?category=all+videos&topic=%s' + + buffer_url = l.carga_web(root_url) + category_list = [] + topic_list = [] + for topic, category_name in l.find_multiple(buffer_url, category_pattern): + if topic not in topic_list: + url = category_url % topic + category_list.append((url, category_name)) + topic_list.append(topic) + + return category_list + + +def get_videolist(url, cat_menu=""): + """This function gets the video list from the FFA website and returns them in a pretty data format.""" + video_entry_sep = 'content-view' + video_url_pattern = '["\'](/watch/[^/]*?/)' + video_thumb_pattern = '["\'](/img/[^"\']*?)["\']' + video_title_pattern = '([^<]+?)' + video_plot_pattern = '([^<]*?)' + video_duration_pattern = '([0-9]+[ 
]+[Mm]in)' + video_rating_pattern = '([0-9.]+[ ]+[Ss]tars)' + video_views_pattern = '([0-9,]+[ ]+[Vv]iews)' + page_num_pattern = 'href=["\']/library/([0-9]+)/' + page_num_url_pattern = 'href=["\'](/library/%d/[^"\']*?)["\']' + page_num_cur_pattern = '/library/([0-9]+)/' + + buffer_url = l.carga_web(url) + + video_list = [] + + reset_cache = False + current_page_num = int(l.find_first(url, page_num_cur_pattern) or '1') + last_page_num = int(max(l.find_multiple(buffer_url, page_num_pattern) or ('1',), key=int)) + + if current_page_num != 1: + prev_page_num = current_page_num - 1 + previous_page_url = root_url + l.find_first(buffer_url, page_num_url_pattern % prev_page_num) + video_entry = { 'url': previous_page_url, 'title': '<< %s (%d)' % (cat_menu, prev_page_num), 'IsPlayable': False } + video_list.append(video_entry) + reset_cache = True + + category = "Video" # The category is no longer included in the latest website change. + for video_section in buffer_url.split(video_entry_sep)[1:]: + url = l.find_first(video_section, video_url_pattern) + if not url: + continue # Sometimes in the search menu can appear articles and other sort of entries rather than videos. + thumb = l.find_first(video_section, video_thumb_pattern) + title = l.find_first(video_section, video_title_pattern) + plot = l.find_first(video_section, video_plot_pattern) + duration = l.find_first(video_section, video_duration_pattern) + rating = l.find_first(video_section, video_rating_pattern) + views = l.find_first(video_section, video_views_pattern) + l.log('Video info. url: "%s" thumb: "%s" title: "%s"' % (url, thumb, title)) + l.log('Video tags. 
duration: "%s" rating: "%s" views: "%s"' % (duration, rating, views)) + video_entry = { + 'url' : root_url + url, + 'title' : title.strip() or '.', + 'thumbnail' : root_url + thumb, + 'plot' : "%s\n%s - %s - %s" % ( + plot.strip(), + duration, + views, + rating, + ), + 'duration' : int(duration.split()[0]) * 60 if duration else 0, + 'rating' : rating.split()[0] if rating else '', + 'genre' : category, + 'IsPlayable' : True + } + video_list.append(video_entry) + + if current_page_num < last_page_num: + next_page_num = current_page_num + 1 + next_page_url = root_url + l.find_first(buffer_url, page_num_url_pattern % next_page_num) + video_entry = { 'url': next_page_url, 'title': '>> %s (%d/%d)' % (cat_menu, next_page_num, last_page_num), 'IsPlayable': False } + video_list.append(video_entry) + + return { 'video_list': video_list, 'reset_cache': reset_cache } + + +def get_search_url(search_string): + """This function returns the search encoded URL to find the videos from the input search string""" + return 'https://www.filmsforaction.org/library/?search=' + l.get_url_encoded(search_string) + + +def get_playable_url(url): + """This function returns a playable URL parsing the different video sources available from the iframe link""" + video_patterns = ( + ('vimeo1', 'vimeo.com/video/([0-9]+)', 'vimeo'), + ('vimeo2', 'vimeo.com%2Fvideo%2F([0-9]+)', 'vimeo'), + ('youtube1', 'videoId: "([0-9A-Za-z_-]{11})', 'youtube'), + ('youtube2', 'youtube.com%2Fwatch%3Fv%3D([0-9A-Za-z_-]{11})', 'youtube'), + ('youtube3', 'youtube.com%2Fembed%2F([0-9A-Za-z_-]{11})', 'youtube'), + ('youtube4', 'youtube.com/embed/([0-9A-Za-z_-]{11})', 'youtube'), + ('dailymotion1', ' src="[htp:]*?//www.dailymotion.com/embed/video/([0-9a-zA-Z]+)', 'dailymotion'), + ('dailymotion2', 'www.dailymotion.com%2Fembed%2Fvideo%2F(.*?)%', 'dailymotion'), + ('archiveorg1', ' src="(https://archive.org/embed/[^"]*?)"', 'archiveorg'), + ('kickstarter1', ' src="(https://www.kickstarter.com/[^"]*?)"', 'kickstarter'), + 
('tagtele1', ' src="(http://www.tagtele.com/embed/[^"]*?)"', 'tagtele'), + ) + + buffer_url = l.carga_web(url) + + for pattern_name, pattern, source in video_patterns: + video_id = l.find_first(buffer_url, pattern) + if video_id: + l.log('We have found this video_id "%s" using the pattern: "%s"' % (video_id, pattern_name)) + try: + playable_url = eval("get_playable_%s_url(video_id)" % source) + break + except: + l.log("There was a problem using the pattern '%s' on this video link: '%s'\n" % (pattern_name, url)) + return '' + else: + l.log("Sorry, but we cannot support the type of video for this link yet:\n'%s'" % url) + playable_url = '' + + return playable_url + + +def get_playable_vimeo_url(video_id): + """This function returns the playable URL for the Vimeo embedded video from the video_id retrieved.""" + video_quality_pattern = '"profile":[0-9]+,"width":([0-9]+),.*?,"url":"([^"]*?)"' + quality_list = ('640', '720', '480', '320', '960', '1280', '1920') + + video_info_url = 'https://player.vimeo.com/video/' + video_id + buffer_link = l.carga_web(video_info_url) + video_options = dict((quality, video) for quality, video in l.find_multiple(buffer_link, video_quality_pattern)) + if len(video_options): + l.log("List of video options: "+repr(video_options)) + for quality in quality_list: + if quality in video_options: + return video_options.get(quality) + + # This quality isn't normalized as it doesn't appear into the quality_list. 
+ return video_options.get(video_options.keys()[0]) + + return "" + + +def get_playable_youtube_url(video_id): + """This function returns the URL path to call the Youtube add-on with the video_id retrieved.""" + return 'plugin://plugin.video.youtube/play/?video_id=' + video_id + + +def get_playable_dailymotion_url(video_id): + """This function returns the playable URL for the Dalymotion embedded video from the video_id retrieved.""" + daily_video_pattern = '"([0-9]+)":\[[^]]*?{"type":"video\\\/mp4","url":"([^"]+?)"' + daily_video_qualities = ('480', '720', '380', '240') + + daily_url = 'http://www.dailymotion.com/embed/video/' + video_id + buffer_link = l.carga_web(daily_url) + video_options = dict((quality, video) for quality, video in l.find_multiple(buffer_link, daily_video_pattern)) + l.log("List of video options: "+repr(video_options)) + for quality_option in daily_video_qualities: + if quality_option in video_options: + video_url = video_options.get(quality_option).replace('\\','') + return video_url + + return "" + + +def get_playable_archiveorg_url(archive_url): + """This function returns the playable URL for the Archive.org embedded video from the video link retrieved.""" + pattern_archive_video = '. + + Description: + These funtions are called from the main plugin module, aimed to ease + and simplify the plugin development process. + Release 0.1.10 +''' + +# First of all We must import all the libraries used for plugin development. +import re, urllib, urllib2 +from datetime import date + +debug_enable = False # The debug logs are disabled by default. + + +def local_log(message): + """This function logs the debug messages under development and testing process. + It is never invoked when the add-on is run under KODI. + Called from the library modules by other functions.""" + + if debug_enable: + print("%s" % message) + + +log = local_log # Use local log function by default. 
+ + +def set_debug_mode(debug_flag, func_log=local_log): + """This function sets the debug_enable var to log everything if debug option is true.""" + + global debug_enable + global log + debug_enable = debug_flag in ("true", True) + log = func_log + + +def get_url_decoded(url): + """This function returns the URL decoded.""" + + log('get_url_decoded URL: "%s"' % url) + return urllib.unquote_plus(url) + + +def get_url_encoded(url): + """This function returns the URL encoded.""" + + log('get_url_encoded URL: "%s"' % url) + return urllib.quote_plus(url) + + +def get_parms_encoded(**kwars): + """This function returns the params encoded to form an URL or data post.""" + + param_list = urllib.urlencode(kwars) + log('get_parms_encoded params: "%s"' % param_list) + return param_list + + +def carga_web(url): + """This function loads the html code from a webserver and returns it into a string.""" + + log('carga_web URL: "%s"' % url) + MiReq = urllib2.Request(url) # We use the Request method because we need to add a header into the HTTP GET to the web site. + # We have to tell the web site we are using a real browser. + MiReq.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64; rv:17.0) Gecko/20100101 Firefox/17.0') # This is a true Firefox header. + MiConex = urllib2.urlopen(MiReq) # We open the HTTP connection to the URL. + MiHTML = MiConex.read() # We load all the HTML contents from the web page and store it into a var. + MiConex.close() # We close the HTTP connection as we have all the info required. + + return MiHTML + + +def carga_web_cookies(url, headers=''): + """This function loads the html code from a webserver passsing the headers into the GET message + and returns it into a string along with the cookies collected from the website.""" + + log('carga_web_cookies URL: "%s"' % url) + MiReq = urllib2.Request(url) # We use the Request method because we need to add a header into the HTTP GET to the web site. 
+ # We have to tell the web site we are using a real browser. + MiReq.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64; rv:17.0) Gecko/20100101 Firefox/17.0') # This is a true Firefox header. + for key in headers: + MiReq.add_header(key, headers[key]) + MiConex = urllib2.urlopen(MiReq) # We open the HTTP connection to the URL. + MiHTML = MiConex.read() # We load all the HTML contents from the web page and store it into a var. + server_info = "%s" % MiConex.info() + my_cookie_pattern = re.compile('Set-Cookie: ([^;]+);') + my_cookies = '' + pcookie = '' + for lcookie in my_cookie_pattern.findall(server_info): + if (lcookie != pcookie): + my_cookies = "%s %s;" % (my_cookies, lcookie) + pcookie = lcookie + + MiConex.close() # We close the HTTP connection as we have all the info required. + + log('carga_web Cookie: "%s"' % my_cookies) + return MiHTML, my_cookies + + +def send_post_data(url, headers='', data=''): + """This function sends an HTTP POST request with theirr corresponding headers and data to a webserver + and returns the html code into a string along with the cookies collected from the website.""" + + log('send_post_data URL: "%s"' % url) + MiReq = urllib2.Request(url, data) # We use the Request method because we need to send a HTTP POST to the web site. + # We have to tell the web site we are using a real browser. + MiReq.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64; rv:17.0) Gecko/20100101 Firefox/17.0') # This is a true Firefox header. + for key in headers: + MiReq.add_header(key, headers[key]) + MiConex = urllib2.urlopen(MiReq) # We open the HTTP connection to the URL. + MiHTML = MiConex.read() # We load all the HTML contents from the web page and store it into a var. 
+ server_info = "%s" % MiConex.info() + my_cookie_pattern = re.compile('Set-Cookie: ([^;]+);') + my_cookies = '' + pcookie = '' + for lcookie in my_cookie_pattern.findall(server_info): + if (lcookie != pcookie): + my_cookies = "%s %s;" % (my_cookies, lcookie) + pcookie = lcookie + + MiConex.close() # We close the HTTP connection as we have all the info required. + + log('send_post_data Cookie: "%s"' % my_cookies) + return MiHTML, my_cookies + + +def get_redirect(url): + """This function returns the redirected URL from a 30X response received from the webserver.""" + + log('get_redirect URL: "%s"' % url) + MiConex = urllib.urlopen(url) # Opens the http connection to the URL. + MiHTML = MiConex.geturl() # Gets the URL redirect link and stores it into MiHTML. + MiConex.close() # Close the http connection as we get what we need. + + return MiHTML + + +def find_multiple(text, pattern): + """This function allows us to find multiples matches from a regexp into a string.""" + + pat_url_par = re.compile(pattern, re.DOTALL) + + return pat_url_par.findall(text) + + +def find_first(text, pattern): + """This function gets back the first match from a regexp into a string.""" + + pat_url_par = re.compile(pattern, re.DOTALL) + try: + return pat_url_par.findall(text)[0] + except: + return "" + + +def get_this_year(): + """This function gets the current year. Useful to fill the Year infolabel whenever it isn't available""" + + return date.today().year + + +def get_clean_title(title): + """This function returns the title or desc cleaned. 
+ ref: http://www.thesauruslex.com/typo/eng/enghtml.htm""" + + return title.\ + replace('á', 'á').\ + replace('à', 'á').\ + replace('é', 'é').\ + replace('è', 'è').\ + replace('í', 'í').\ + replace('ó', 'ó').\ + replace('ò', 'ò').\ + replace('ú', 'ú').\ + replace('ä', 'ä').\ + replace('ï', 'ï').\ + replace('ö', 'ö').\ + replace('ü', 'ü').\ + replace('ß', 'ß').\ + replace('ñ', 'ñ').\ + replace('ç', 'ç').\ + replace('Á', 'Á').\ + replace('À', 'À').\ + replace('É', 'É').\ + replace('È', 'È').\ + replace('Í', 'Í').\ + replace('Ó', 'Ó').\ + replace('Ò', 'Ò').\ + replace('Ú', 'Ú').\ + replace('Ä', 'Ä').\ + replace('Ï', 'Ï').\ + replace('Ö', 'Ö').\ + replace('Ü', 'Ü').\ + replace('Ñ', 'Ñ').\ + replace('Ç', 'Ç').\ + replace('"', '"').\ + replace(''', "´").\ + replace(' ', " ").\ + replace('–', '').\ + replace('’', "'").\ + replace('“', '"').\ + replace('”', '"').\ + replace('‟', "'").\ + replace('…', '').\ + replace('’', "´").\ + replace('«', '"').\ + replace('»', '"').\ + replace('¡', '¡').\ + replace('&iinte;', '¿').\ + replace('&', '&').\ + replace(' ', '').\ + replace('"', '"').\ + replace('ª', 'ª').\ + replace('º', 'º').\ + replace('·', '·').\ + replace('…', '...').\ + replace('
', '').\ + strip() + + +def get_clean_html_tags(html_text): + """This function returns the text or desc cleaned from html tags.""" + + return re.sub(r'<[^>]*?>', '', html_text, count=0, flags=re.DOTALL) diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/lutil.pyo b/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/lutil.pyo new file mode 100644 index 000000000..a42b2faae Binary files /dev/null and b/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/lutil.pyo differ diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/plugin.py b/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/plugin.py new file mode 100644 index 000000000..8f3a517d5 --- /dev/null +++ b/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/plugin.py @@ -0,0 +1,174 @@ +# _*_ coding: utf-8 _*_ + +''' + plugin: library class for XBMC video add-ons. + Copyright (C) 2014 José Antonio Montes (jamontes) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + + Description: + These class methods are called from the main add-on module, aimed to ease + and simplify the add-on development process. + Release 0.1.3 +''' + +# First of all We must import all the libraries used for plugin development. 
+import sys, urllib, re, os +import xbmcplugin, xbmcaddon, xbmcgui, xbmcaddon, xbmc + +class Plugin(): + + def __init__(self, plugin_id='', show_thumb_as_fanart=True): + self.pluginpath = sys.argv[0] + self.pluginhandle = int(sys.argv[1]) + self.pluginparams = sys.argv[2] + self.plugin_id = plugin_id + self.debug_enable = False # The debug logs are disabled by default. + self.plugin_settings = xbmcaddon.Addon(id=self.plugin_id) + self.translation = self.plugin_settings.getLocalizedString + self.root_path = self.plugin_settings.getAddonInfo('path') + self.fanart_file = os.path.join(self.root_path, "fanart.jpg") + self.show_thumb_as_fanart = show_thumb_as_fanart + + + def get_plugin_settings(self): + """This is a getter method to return the settings method reference.""" + return self.plugin_settings + + + def get_plugin_translation(self): + """This is a getter method to return the translation method reference.""" + return self.translation + + + def get_system_language(self): + """This method returns the GUI language.""" + return xbmc.getLanguage() + + + def set_debug_mode(self, debug_flag=""): + """This method sets the debug_enable flag to log everything if debug option within add-on settings is activated.""" + self.debug_enable = debug_flag in ("true", True) + + + def set_fanart(self): + """This method setup the file and global plugin fanart.""" + xbmcplugin.setPluginFanart(self.pluginhandle, self.fanart_file) + + + def log(self, message): + """This method logs the messages into the main XBMC log file, only if debug option is activated from the add-on settings. 
+ This method is called from the main add-on module.""" + if self.debug_enable: + try: + xbmc.log(msg=message, level=xbmc.LOGNOTICE) + except: + xbmc.log('%s: log this line is not possible due to encoding string problems' % self.plugin_id, level=xbmc.LOGNOTICE) + + + def _log(self, message): + """This method logs the messages into the main XBMC log file, only if debug option is activated from the add-on settings. + This method is privated and only called from other methods within the class.""" + if self.debug_enable: + try: + xbmc.log(msg=message, level=xbmc.LOGNOTICE) + except: + xbmc.log('%s: _log this line is not possible due to encoding string problems' % self.plugin_id, level=xbmc.LOGNOTICE) + + + def get_plugin_parms(self): + """This method gets all the parameters passed to the plugin from XBMC API and retuns a dictionary. + Example: plugin://plugin.video.atactv/?parametro1=valor1¶metro2=valor2¶metro3""" + params = sys.argv[2] + + pattern_params = re.compile('[?&]([^=&]+)=?([^&]*)') + options = dict((parameter, urllib.unquote_plus(value)) for (parameter, value) in pattern_params.findall(params)) + self._log("get_plugin_parms " + repr(options)) + return options + + + def get_plugin_path(self, **kwars): + """This method returns the add-on path URL encoded along with all its parameters.""" + return sys.argv[0] + '?' 
+ urllib.urlencode(kwars) + + + def get_url_decoded(self, url): + """This method returns the URL decoded.""" + self._log('get_url_decoded URL: "%s"' % url) + return urllib.unquote_plus(url) + + + def get_url_encoded(self, url): + """This method returns the URL encoded.""" + self._log('get_url_encoded URL: "%s"' % url) + return urllib.quote_plus(url) + + + def set_content_list(self, contents="episodes"): + """This method sets the video contents for the video list.""" + self._log("set_content_list contents: " + contents) + xbmcplugin.setContent(self.pluginhandle, contents) + + + def set_plugin_category(self, genre=''): + """This method sets the plugin genre for the video list.""" + xbmcplugin.setPluginCategory(self.pluginhandle, genre) + + + def get_keyboard_text(self, prompt): + """This method gets an input text from the keyboard.""" + self._log('get_keyboard_text prompt: "%s"' % prompt) + + keyboard = xbmc.Keyboard('', prompt) + keyboard.doModal() + if keyboard.isConfirmed() and keyboard.getText(): + self._log("get_keyboard_text input text: '%s'" % keyboard.getText()) + return keyboard.getText() + else: + # Close directory as empty result. 
+ xbmcplugin.endOfDirectory(self.pluginhandle, succeeded=True, updateListing=False, cacheToDisc=False) + return "" + + + def add_items(self, items, updateListing=False): + """This method adds the list of items (links and folders) to the add-on video list.""" + item_list = [] + for item in items: + if item['IsPlayable']: # It is a link + link_item = xbmcgui.ListItem(item['info']['title'], iconImage = "DefaultVideo.png", thumbnailImage = item['thumbnail']) + link_item.setInfo(type = "Video", infoLabels = item['info']) + link_item.setProperty('IsPlayable', 'true') + link_item.setProperty('Fanart_Image', item['thumbnail'] if self.show_thumb_as_fanart else self.fanart_file) + else: # It is a folder + link_item = xbmcgui.ListItem(item['info']['title'], iconImage = "DefaultFolder.png", thumbnailImage = '') + link_item.setInfo(type = "Video", infoLabels = item['info']) + link_item.setProperty('Fanart_Image', self.fanart_file) + item_list.append((item['path'], link_item, not item['IsPlayable'])) + xbmcplugin.addDirectoryItems(self.pluginhandle, item_list, len(item_list)) + xbmcplugin.endOfDirectory(self.pluginhandle, succeeded=True, updateListing=updateListing, cacheToDisc=True) + #xbmcplugin.setContent(self.pluginhandle, 'episodes') + xbmcplugin.setContent(self.pluginhandle, 'movies') + + + def showWarning(self, message): + """This method shows a popup window with a notices message through the XBMC GUI during 5 secs.""" + self._log("showWarning message: %s" % message) + xbmcgui.Dialog().notification('Info:', message, time=6000) + + + def play_resolved_url(self, url = ""): + """This method plays the video file pointed by the URL passed as argument.""" + self._log("play_resolved_url pluginhandle = [%s] url = [%s]" % (self.pluginhandle, url)) + listitem = xbmcgui.ListItem(path=url) + return xbmcplugin.setResolvedUrl(self.pluginhandle, True, listitem) diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/plugin.pyo 
b/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/plugin.pyo new file mode 100644 index 000000000..a4f1810bd Binary files /dev/null and b/.install/.kodi/addons/plugin.video.filmsforaction/resources/lib/plugin.pyo differ diff --git a/.install/.kodi/addons/plugin.video.filmsforaction/resources/settings.xml b/.install/.kodi/addons/plugin.video.filmsforaction/resources/settings.xml new file mode 100644 index 000000000..e7ab2b03f --- /dev/null +++ b/.install/.kodi/addons/plugin.video.filmsforaction/resources/settings.xml @@ -0,0 +1,6 @@ + + + + + + diff --git a/.install/.kodi/addons/plugin.video.francetv/resources/__init__.pyo b/.install/.kodi/addons/plugin.video.francetv/resources/__init__.pyo new file mode 100644 index 000000000..2e4dce8e1 Binary files /dev/null and b/.install/.kodi/addons/plugin.video.francetv/resources/__init__.pyo differ diff --git a/.install/.kodi/addons/plugin.video.francetv/resources/lib/__init__.pyo b/.install/.kodi/addons/plugin.video.francetv/resources/lib/__init__.pyo new file mode 100644 index 000000000..047fc4b5d Binary files /dev/null and b/.install/.kodi/addons/plugin.video.francetv/resources/lib/__init__.pyo differ diff --git a/.install/.kodi/addons/plugin.video.francetv/resources/lib/addon.pyo b/.install/.kodi/addons/plugin.video.francetv/resources/lib/addon.pyo new file mode 100644 index 000000000..731931017 Binary files /dev/null and b/.install/.kodi/addons/plugin.video.francetv/resources/lib/addon.pyo differ diff --git a/.install/.kodi/addons/plugin.video.francetv/resources/lib/api.pyo b/.install/.kodi/addons/plugin.video.francetv/resources/lib/api.pyo new file mode 100644 index 000000000..2a7b56782 Binary files /dev/null and b/.install/.kodi/addons/plugin.video.francetv/resources/lib/api.pyo differ diff --git a/.install/.kodi/addons/plugin.video.francetv/resources/lib/kodilogging.pyo b/.install/.kodi/addons/plugin.video.francetv/resources/lib/kodilogging.pyo new file mode 100644 index 000000000..d2352df86 Binary 
files /dev/null and b/.install/.kodi/addons/plugin.video.francetv/resources/lib/kodilogging.pyo differ diff --git a/.install/.kodi/addons/plugin.video.francetv/resources/lib/utils.pyo b/.install/.kodi/addons/plugin.video.francetv/resources/lib/utils.pyo new file mode 100644 index 000000000..bc55cab8e Binary files /dev/null and b/.install/.kodi/addons/plugin.video.francetv/resources/lib/utils.pyo differ diff --git a/.install/.kodi/addons/plugin.video.francetv/resources/lib/video.pyo b/.install/.kodi/addons/plugin.video.francetv/resources/lib/video.pyo new file mode 100644 index 000000000..3a806a48b Binary files /dev/null and b/.install/.kodi/addons/plugin.video.francetv/resources/lib/video.pyo differ diff --git a/.install/.kodi/addons/plugin.video.vstream/resources/__init__.pyo b/.install/.kodi/addons/plugin.video.vstream/resources/__init__.pyo index d13b1cad9..abf60bc44 100644 Binary files a/.install/.kodi/addons/plugin.video.vstream/resources/__init__.pyo and b/.install/.kodi/addons/plugin.video.vstream/resources/__init__.pyo differ diff --git a/.install/.kodi/addons/plugin.video.vstream/resources/hosters/__init__.pyo b/.install/.kodi/addons/plugin.video.vstream/resources/hosters/__init__.pyo new file mode 100644 index 000000000..dd2a26478 Binary files /dev/null and b/.install/.kodi/addons/plugin.video.vstream/resources/hosters/__init__.pyo differ diff --git a/.install/.kodi/addons/plugin.video.vstream/resources/hosters/allow_redirects.pyo b/.install/.kodi/addons/plugin.video.vstream/resources/hosters/allow_redirects.pyo new file mode 100644 index 000000000..c63afa03c Binary files /dev/null and b/.install/.kodi/addons/plugin.video.vstream/resources/hosters/allow_redirects.pyo differ diff --git a/.install/.kodi/addons/plugin.video.vstream/resources/hosters/dailymotion.py b/.install/.kodi/addons/plugin.video.vstream/resources/hosters/dailymotion.py index fe6d2938a..c7d630ea5 100644 --- a/.install/.kodi/addons/plugin.video.vstream/resources/hosters/dailymotion.py +++ 
b/.install/.kodi/addons/plugin.video.vstream/resources/hosters/dailymotion.py @@ -1,104 +1,104 @@ -#-*- coding: utf-8 -*- -# https://github.com/Kodi-vStream/venom-xbmc-addons -from resources.lib.handler.requestHandler import cRequestHandler -from resources.lib.parser import cParser -from resources.hosters.hoster import iHoster -from resources.lib.comaddon import dialog - -class cHoster(iHoster): - - def __init__(self): - self.__sDisplayName = 'DailyMotion' - self.__sFileName = self.__sDisplayName - self.__sHD = '' - - def getDisplayName(self): - return self.__sDisplayName - - def setDisplayName(self, sDisplayName): - self.__sDisplayName = sDisplayName + ' [COLOR skyblue]' + self.__sDisplayName + '[/COLOR] [COLOR khaki]' + self.__sHD + '[/COLOR]' - - def setFileName(self, sFileName): - self.__sFileName = sFileName - - def getFileName(self): - return self.__sFileName - - def setHD(self, sHD): - if 'hd' in sHD: - self.__sHD = 'HD' - else: - self.__sHD = '' - - def getHD(self): - return self.__sHD - - def getPluginIdentifier(self): - return 'dailymotion' - - def isDownloadable(self): - return True - - def getPattern(self): - return '' - - def setUrl(self, sUrl): - self.__sUrl = str(sUrl) - self.__sUrl = self.__sUrl.replace('http://dai.ly/', '') - self.__sUrl = self.__sUrl.replace('http://www.dailymotion.com/', '') - self.__sUrl = self.__sUrl.replace('https://www.dailymotion.com/', '') - self.__sUrl = self.__sUrl.replace('embed/', '') - self.__sUrl = self.__sUrl.replace('video/', '') - self.__sUrl = self.__sUrl.replace('sequence/', '') - self.__sUrl = self.__sUrl.replace('swf/', '') - self.__sUrl = 'http://www.dailymotion.com/embed/video/' + str(self.__sUrl) - - def checkUrl(self, sUrl): - return True - - def getUrl(self): - return self.__sUrl - - def getMediaLink(self): - return self.__getMediaLinkForGuest() - - def __getMediaLinkForGuest(self): - api_call = False - url=[] - qua=[] - - oRequest = cRequestHandler(self.__sUrl) - oRequest.addHeaderEntry('User-Agent', 
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:70.0) Gecko/20100101 Firefox/70.0') - oRequest.addHeaderEntry('Accept-Language', 'fr,fr-FR;q=0.8,en-US;q=0.5,en;q=0.3') - oRequest.addHeaderEntry('Cookie', "ff=off") - sHtmlContent = oRequest.request() - - - oParser = cParser() - - sPattern = '{"type":"application.+?mpegURL","url":"([^"]+)"}' - aResult = oParser.parse(sHtmlContent, sPattern) - - if (aResult[0] == True): - oRequest = cRequestHandler(aResult[1][0]) - oRequest.addHeaderEntry('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:70.0) Gecko/20100101 Firefox/70.0') - oRequest.addHeaderEntry('Accept-Language', 'fr,fr-FR;q=0.8,en-US;q=0.5,en;q=0.3') - sHtmlContent = oRequest.request() - - sPattern = 'NAME="([^"]+)"(,PROGRESSIVE-URI="([^"]+)"|http(.+?)\#)' - aResult = oParser.parse(sHtmlContent, sPattern) - if (aResult[0] == True): - for aEntry in reversed(aResult[1]): - quality = aEntry[0].replace('@60', '') - if quality not in qua: - qua.append(quality) - link = aEntry[2] if aEntry[2] else 'http' + aEntry[3] - url.append(link) - - - api_call = dialog().VSselectqual(qua, url) - - if (api_call): - return True, api_call - - return False, False +#-*- coding: utf-8 -*- +# https://github.com/Kodi-vStream/venom-xbmc-addons +from resources.lib.handler.requestHandler import cRequestHandler +from resources.lib.parser import cParser +from resources.hosters.hoster import iHoster +from resources.lib.comaddon import dialog + +class cHoster(iHoster): + + def __init__(self): + self.__sDisplayName = 'DailyMotion' + self.__sFileName = self.__sDisplayName + self.__sHD = '' + + def getDisplayName(self): + return self.__sDisplayName + + def setDisplayName(self, sDisplayName): + self.__sDisplayName = sDisplayName + ' [COLOR skyblue]' + self.__sDisplayName + '[/COLOR] [COLOR khaki]' + self.__sHD + '[/COLOR]' + + def setFileName(self, sFileName): + self.__sFileName = sFileName + + def getFileName(self): + return self.__sFileName + + def setHD(self, sHD): + if 'hd' in sHD: + self.__sHD = 
'HD' + else: + self.__sHD = '' + + def getHD(self): + return self.__sHD + + def getPluginIdentifier(self): + return 'dailymotion' + + def isDownloadable(self): + return True + + def getPattern(self): + return '' + + def setUrl(self, sUrl): + self.__sUrl = str(sUrl) + self.__sUrl = self.__sUrl.replace('http://dai.ly/', '') + self.__sUrl = self.__sUrl.replace('http://www.dailymotion.com/', '') + self.__sUrl = self.__sUrl.replace('https://www.dailymotion.com/', '') + self.__sUrl = self.__sUrl.replace('embed/', '') + self.__sUrl = self.__sUrl.replace('video/', '') + self.__sUrl = self.__sUrl.replace('sequence/', '') + self.__sUrl = self.__sUrl.replace('swf/', '') + self.__sUrl = 'http://www.dailymotion.com/embed/video/' + str(self.__sUrl) + + def checkUrl(self, sUrl): + return True + + def getUrl(self): + return self.__sUrl + + def getMediaLink(self): + return self.__getMediaLinkForGuest() + + def __getMediaLinkForGuest(self): + api_call = False + url=[] + qua=[] + + oRequest = cRequestHandler(self.__sUrl) + oRequest.addHeaderEntry('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:70.0) Gecko/20100101 Firefox/70.0') + oRequest.addHeaderEntry('Accept-Language', 'fr,fr-FR;q=0.8,en-US;q=0.5,en;q=0.3') + oRequest.addHeaderEntry('Cookie', "ff=off") + sHtmlContent = oRequest.request() + + + oParser = cParser() + + sPattern = '{"type":"application.+?mpegURL","url":"([^"]+)"}' + aResult = oParser.parse(sHtmlContent, sPattern) + + if (aResult[0] == True): + oRequest = cRequestHandler(aResult[1][0]) + oRequest.addHeaderEntry('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:70.0) Gecko/20100101 Firefox/70.0') + oRequest.addHeaderEntry('Accept-Language', 'fr,fr-FR;q=0.8,en-US;q=0.5,en;q=0.3') + sHtmlContent = oRequest.request() + + sPattern = 'NAME="([^"]+)"(,PROGRESSIVE-URI="([^"]+)"|http(.+?)\#)' + aResult = oParser.parse(sHtmlContent, sPattern) + if (aResult[0] == True): + for aEntry in reversed(aResult[1]): + quality = aEntry[0].replace('@60', '') + if quality not in 
qua: + qua.append(quality) + link = aEntry[2] if aEntry[2] else 'http' + aEntry[3] + url.append(link) + + + api_call = dialog().VSselectqual(qua, url) + + if (api_call): + return True, api_call + + return False, False diff --git a/.install/.kodi/addons/plugin.video.vstream/resources/hosters/femax.py b/.install/.kodi/addons/plugin.video.vstream/resources/hosters/femax.py index 0c680d650..540df4947 100644 --- a/.install/.kodi/addons/plugin.video.vstream/resources/hosters/femax.py +++ b/.install/.kodi/addons/plugin.video.vstream/resources/hosters/femax.py @@ -1,84 +1,84 @@ -# -*- coding: utf-8 -*- -# vStream https://github.com/Kodi-vStream/venom-xbmc-addons -# https://femax20.com/v/xxxxxxxxxx - -import json -from resources.lib.handler.requestHandler import cRequestHandler -from resources.lib.parser import cParser -from resources.hosters.hoster import iHoster -from resources.lib.comaddon import dialog ,VSlog - - -class cHoster(iHoster): - - def __init__(self): - self.__sDisplayName = 'Femax' - self.__sFileName = self.__sDisplayName - self.__sHD = '' - - def getDisplayName(self): - return self.__sDisplayName - - def setDisplayName(self, sDisplayName): - self.__sDisplayName = sDisplayName + ' [COLOR skyblue]' + self.__sDisplayName + '[/COLOR]' - - def setFileName(self, sFileName): - self.__sFileName = sFileName - - def getFileName(self): - return self.__sFileName - - def isDownloadable(self): - return False - - def getPluginIdentifier(self): - return 'femax' - - def setUrl(self, sUrl): - self.__sUrl = str(sUrl) - - def checkUrl(self, sUrl): - return True - - def __getUrl(self, media_id): - return - - def getMediaLink(self): - return self.__getMediaLinkForGuest() - - def __getMediaLinkForGuest(self, api_call=None): - - req = self.__sUrl.replace('/v/','/api/source/') - pdata = 'r' # 'r' ou n'importe quelle chaine (ne doit pas etre vide) - oRequestHandler = cRequestHandler(req) - oRequestHandler.setRequestType(1) - oRequestHandler.addParametersLine(pdata) - sHtmlContent = 
oRequestHandler.request() - jsonrsp = json.loads(sHtmlContent ) - - list_url = [] - list_q = [] - bfind = False - for rsp in jsonrsp: - if rsp == 'data': - bfind = True - if not bfind: - return False, False - - try: - for idata in range(len(jsonrsp['data'])): - url = jsonrsp['data'][idata]['file'] - stype = jsonrsp['data'][idata]['type'] - q = jsonrsp['data'][idata]['label'] - list_url.append(url + '.' + stype) - list_q.append(q) - - api_call = dialog().VSselectqual(list_q, list_url) - - except: - return False, False - - if api_call: - return True, api_call - - return False, False +# -*- coding: utf-8 -*- +# vStream https://github.com/Kodi-vStream/venom-xbmc-addons +# https://femax20.com/v/xxxxxxxxxx + +import json +from resources.lib.handler.requestHandler import cRequestHandler +from resources.lib.parser import cParser +from resources.hosters.hoster import iHoster +from resources.lib.comaddon import dialog ,VSlog + + +class cHoster(iHoster): + + def __init__(self): + self.__sDisplayName = 'Femax' + self.__sFileName = self.__sDisplayName + self.__sHD = '' + + def getDisplayName(self): + return self.__sDisplayName + + def setDisplayName(self, sDisplayName): + self.__sDisplayName = sDisplayName + ' [COLOR skyblue]' + self.__sDisplayName + '[/COLOR]' + + def setFileName(self, sFileName): + self.__sFileName = sFileName + + def getFileName(self): + return self.__sFileName + + def isDownloadable(self): + return False + + def getPluginIdentifier(self): + return 'femax' + + def setUrl(self, sUrl): + self.__sUrl = str(sUrl) + + def checkUrl(self, sUrl): + return True + + def __getUrl(self, media_id): + return + + def getMediaLink(self): + return self.__getMediaLinkForGuest() + + def __getMediaLinkForGuest(self, api_call=None): + + req = self.__sUrl.replace('/v/','/api/source/') + pdata = 'r' # 'r' ou n'importe quelle chaine (ne doit pas etre vide) + oRequestHandler = cRequestHandler(req) + oRequestHandler.setRequestType(1) + oRequestHandler.addParametersLine(pdata) + 
sHtmlContent = oRequestHandler.request() + jsonrsp = json.loads(sHtmlContent ) + + list_url = [] + list_q = [] + bfind = False + for rsp in jsonrsp: + if rsp == 'data': + bfind = True + if not bfind: + return False, False + + try: + for idata in range(len(jsonrsp['data'])): + url = jsonrsp['data'][idata]['file'] + stype = jsonrsp['data'][idata]['type'] + q = jsonrsp['data'][idata]['label'] + list_url.append(url + '.' + stype) + list_q.append(q) + + api_call = dialog().VSselectqual(list_q, list_url) + + except: + return False, False + + if api_call: + return True, api_call + + return False, False diff --git a/.install/.kodi/addons/plugin.video.vstream/resources/hosters/frenchvid.py b/.install/.kodi/addons/plugin.video.vstream/resources/hosters/frenchvid.py index 679846cdb..ed8a29e0f 100644 --- a/.install/.kodi/addons/plugin.video.vstream/resources/hosters/frenchvid.py +++ b/.install/.kodi/addons/plugin.video.vstream/resources/hosters/frenchvid.py @@ -1,116 +1,116 @@ -#-*- coding: utf-8 -*- -# https://github.com/Kodi-vStream/venom-xbmc-addons -#french-stream /18117-la-frontire-verte-saison-1.html -#liens FVS io -from resources.lib.handler.requestHandler import cRequestHandler -from resources.lib.parser import cParser -from resources.hosters.hoster import iHoster -from resources.lib.comaddon import dialog, VSlog -from resources.lib.util import QuotePlus -import json - - -UA = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:70.0) Gecko/20100101 Firefox/70.0' - -class cHoster(iHoster): - - def __init__(self): - self.__sDisplayName = 'Frenchvid' - self.__sFileName = self.__sDisplayName - - def getDisplayName(self): - return self.__sDisplayName - - def setDisplayName(self, sDisplayName): - self.__sDisplayName = sDisplayName + ' [COLOR skyblue]' + self.__sDisplayName + '[/COLOR]' - - def setFileName(self, sFileName): - self.__sFileName = sFileName - - def getFileName(self): - return self.__sFileName - - def getPluginIdentifier(self): - return 'frenchvid' - - def 
isDownloadable(self): - return True - - def setUrl(self, sUrl): - self.__sUrl = str(sUrl) - - def checkUrl(self, sUrl): - return True - - def getUrl(self): - return self.__sUrl - - def getMediaLink(self): - return self.__getMediaLinkForGuest() - - def __getMediaLinkForGuest(self): - - if 'yggseries.com' in self.__sUrl: - baseUrl = 'https://yggseries.com/api/source/' - elif 'french-vid' in self.__sUrl: - baseUrl = 'https://www.fembed.com/api/source/' - elif 'fembed' in self.__sUrl: - baseUrl = 'https://www.fembed.com/api/source/' - elif 'feurl' in self.__sUrl: - baseUrl = 'https://feurl.com/api/source/' - elif 'vfsplayer' in self.__sUrl: - baseUrl = 'https://vfsplayer.xyz/api/source/' - elif 'fsimg' in self.__sUrl: - baseUrl = 'https://www.fsimg.info/api/source/' - elif 'fem.tohds' in self.__sUrl: - baseUrl = 'https://feurl.com/api/source/' - elif 'core1player' in self.__sUrl: - baseUrl = 'https://www.core1player.com/api/source/' - elif 'gotochus' in self.__sUrl: - baseUrl = 'https://www.gotochus.com/api/source/' - - if 'fem.tohds' in self.__sUrl: - oRequestHandler = cRequestHandler(self.__sUrl) - sHtmlContent = oRequestHandler.request() - - sPattern = '' - aResult = oParser.parse(sHtmlContent, sPattern) - - if (aResult[0]): - - sHosterUrl = '' - Referer ='' - url = aResult[1][0] - if (not url.startswith("http")): - url = "http:" + url - #url = 'http://www.sporcanli.com/frame2.html' #a garder peut etre utils pour ajouter un hébergeur - - VSlog(url) - - if 'espn-live.stream' in url: - oRequestHandler = cRequestHandler(url) - sHtmlContent2 = oRequestHandler.request() - aResult = re.findall(sPattern, sHtmlContent2) - if aResult: - url = aResult[0] # redirection vers un autre site - - if 'footballreal.xyz' in url: - oRequestHandler = cRequestHandler(url) - sHtmlContent2 = oRequestHandler.request() - sPattern1 = '' - aResult = re.findall(sPattern2, sHtmlContent) - if aResult: - accountid = aResult[0] - jsonUrl = 'https://player-api.new.' 
+ accountid + '?format=short' - oRequestHandler = cRequestHandler(jsonUrl) - sHtmlContent = oRequestHandler.request() - sPattern3 = '"m3u8_url":"(.+?)"' - aResult = re.findall(sPattern3, sHtmlContent) - if aResult: - sHosterUrl = aResult[0] - - if 'forbet.tv' in url:#Probleme ssl - oRequestHandler = cRequestHandler(url) - sHtmlContent2 = oRequestHandler.request() - sPattern2 = 'file: "([^"]+)"' - aResult = re.findall(sPattern2, sHtmlContent2) - if aResult: - sHosterUrl = aResult[0] - - if 'p.hd24.watch' in url:#Terminé - oRequestHandler = cRequestHandler(url) - sHtmlContent2 = oRequestHandler.request() - sPattern2 = 'data-channel="([^"]+)">' - aResult = re.findall(sPattern2, sHtmlContent2) - if aResult: - Host = '190-2-146-56.livesports24.online' - sHosterUrl = 'https://' + Host + '/' + aResult[0] + '.m3u8' - - if 'hdsoccerstreams.net' in url:#Pas terminer - oRequestHandler = cRequestHandler(url) - sHtmlContent2 = oRequestHandler.request() - sPattern2 = ' + """ % (self.OutputString(attrs).replace('"', r'\"')) + + def OutputString(self, attrs=None): + # Build up our result + # + result = [] + append = result.append + + # First, the key=value pair + append("%s=%s" % (self.key, self.coded_value)) + + # Now add any defined attributes + if attrs is None: + attrs = self._reserved + items = sorted(self.items()) + for key, value in items: + if value == "": + continue + if key not in attrs: + continue + if key == "expires" and isinstance(value, int): + append("%s=%s" % (self._reserved[key], _getdate(value))) + elif key == "max-age" and isinstance(value, int): + append("%s=%d" % (self._reserved[key], value)) + elif key == "secure": + append(str(self._reserved[key])) + elif key == "httponly": + append(str(self._reserved[key])) + else: + append("%s=%s" % (self._reserved[key], value)) + + # Return the result + return _semispacejoin(result) + + +# +# Pattern for finding cookie +# +# This used to be strict parsing based on the RFC2109 and RFC2068 +# specifications. 
I have since discovered that MSIE 3.0x doesn't +# follow the character rules outlined in those specs. As a +# result, the parsing rules here are less strict. +# + +_LegalCharsPatt = r"[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]" +_CookiePattern = re.compile(r""" + (?x) # This is a verbose pattern + (?P # Start of group 'key' + """ + _LegalCharsPatt + r"""+? # Any word of at least one letter + ) # End of group 'key' + ( # Optional group: there may not be a value. + \s*=\s* # Equal Sign + (?P # Start of group 'val' + "(?:[^\\"]|\\.)*" # Any doublequoted string + | # or + \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr + | # or + """ + _LegalCharsPatt + r"""* # Any word or empty string + ) # End of group 'val' + )? # End of optional value group + \s* # Any number of spaces. + (\s+|;|$) # Ending either at space, semicolon, or EOS. + """, re.ASCII) # May be removed if safe. + + +# At long last, here is the cookie class. Using this class is almost just like +# using a dictionary. See this module's docstring for example usage. +# +class BaseCookie(dict): + """A container class for a set of Morsels.""" + + def value_decode(self, val): + """real_value, coded_value = value_decode(STRING) + Called prior to setting a cookie's value from the network + representation. The VALUE is the value read from HTTP + header. + Override this function to modify the behavior of cookies. + """ + return val, val + + def value_encode(self, val): + """real_value, coded_value = value_encode(VALUE) + Called prior to setting a cookie's value from the dictionary + representation. The VALUE is the value being assigned. + Override this function to modify the behavior of cookies. 
+ """ + strval = str(val) + return strval, strval + + def __init__(self, input=None): + if input: + self.load(input) + + def __set(self, key, real_value, coded_value): + """Private method for setting a cookie's value""" + M = self.get(key, Morsel()) + M.set(key, real_value, coded_value) + dict.__setitem__(self, key, M) + + def __setitem__(self, key, value): + """Dictionary style assignment.""" + rval, cval = self.value_encode(value) + self.__set(key, rval, cval) + + def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"): + """Return a string suitable for HTTP.""" + result = [] + items = sorted(self.items()) + for key, value in items: + result.append(value.output(attrs, header)) + return sep.join(result) + + __str__ = output + + @as_native_str() + def __repr__(self): + l = [] + items = sorted(self.items()) + for key, value in items: + if PY2 and isinstance(value.value, unicode): + val = str(value.value) # make it a newstr to remove the u prefix + else: + val = value.value + l.append('%s=%s' % (str(key), repr(val))) + return '<%s: %s>' % (self.__class__.__name__, _spacejoin(l)) + + def js_output(self, attrs=None): + """Return a string suitable for JavaScript.""" + result = [] + items = sorted(self.items()) + for key, value in items: + result.append(value.js_output(attrs)) + return _nulljoin(result) + + def load(self, rawdata): + """Load cookies from a string (presumably HTTP_COOKIE) or + from a dictionary. 
Loading cookies from a dictionary 'd' + is equivalent to calling: + map(Cookie.__setitem__, d.keys(), d.values()) + """ + if isinstance(rawdata, str): + self.__parse_string(rawdata) + else: + # self.update() wouldn't call our custom __setitem__ + for key, value in rawdata.items(): + self[key] = value + return + + def __parse_string(self, mystr, patt=_CookiePattern): + i = 0 # Our starting point + n = len(mystr) # Length of string + M = None # current morsel + + while 0 <= i < n: + # Start looking for a cookie + match = patt.search(mystr, i) + if not match: + # No more cookies + break + + key, value = match.group("key"), match.group("val") + + i = match.end(0) + + # Parse the key, value in case it's metainfo + if key[0] == "$": + # We ignore attributes which pertain to the cookie + # mechanism as a whole. See RFC 2109. + # (Does anyone care?) + if M: + M[key[1:]] = value + elif key.lower() in Morsel._reserved: + if M: + if value is None: + if key.lower() in Morsel._flags: + M[key] = True + else: + M[key] = _unquote(value) + elif value is not None: + rval, cval = self.value_decode(value) + self.__set(key, rval, cval) + M = self[key] + + +class SimpleCookie(BaseCookie): + """ + SimpleCookie supports strings as cookie values. When setting + the value using the dictionary assignment notation, SimpleCookie + calls the builtin str() to convert the value to a string. Values + received from HTTP are kept as strings. + """ + def value_decode(self, val): + return _unquote(val), val + + def value_encode(self, val): + strval = str(val) + return strval, _quote(strval) diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/http/server.py b/.install/.kodi/addons/script.module.future/libs/future/backports/http/server.py new file mode 100644 index 000000000..b1c11e0c7 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/http/server.py @@ -0,0 +1,1226 @@ +"""HTTP server classes. 
+ +From Python 3.3 + +Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see +SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, +and CGIHTTPRequestHandler for CGI scripts. + +It does, however, optionally implement HTTP/1.1 persistent connections, +as of version 0.3. + +Notes on CGIHTTPRequestHandler +------------------------------ + +This class implements GET and POST requests to cgi-bin scripts. + +If the os.fork() function is not present (e.g. on Windows), +subprocess.Popen() is used as a fallback, with slightly altered semantics. + +In all cases, the implementation is intentionally naive -- all +requests are executed synchronously. + +SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL +-- it may execute arbitrary Python code or external programs. + +Note that status code 200 is sent prior to execution of a CGI script, so +scripts cannot send other status codes such as 302 (redirect). + +XXX To do: + +- log requests even later (to capture byte count) +- log user-agent header and other interesting goodies +- send error log to separate file +""" + +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future import utils +from future.builtins import * + + +# See also: +# +# HTTP Working Group T. Berners-Lee +# INTERNET-DRAFT R. T. Fielding +# H. Frystyk Nielsen +# Expires September 8, 1995 March 8, 1995 +# +# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt +# +# and +# +# Network Working Group R. Fielding +# Request for Comments: 2616 et al +# Obsoletes: 2068 June 1999 +# Category: Standards Track +# +# URL: http://www.faqs.org/rfcs/rfc2616.html + +# Log files +# --------- +# +# Here's a quote from the NCSA httpd docs about log file format. +# +# | The logfile format is as follows. 
Each line consists of: +# | +# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb +# | +# | host: Either the DNS name or the IP number of the remote client +# | rfc931: Any information returned by identd for this person, +# | - otherwise. +# | authuser: If user sent a userid for authentication, the user name, +# | - otherwise. +# | DD: Day +# | Mon: Month (calendar name) +# | YYYY: Year +# | hh: hour (24-hour format, the machine's timezone) +# | mm: minutes +# | ss: seconds +# | request: The first line of the HTTP request as sent by the client. +# | ddd: the status code returned by the server, - if not available. +# | bbbb: the total number of bytes sent, +# | *not including the HTTP/1.0 header*, - if not available +# | +# | You can determine the name of the file accessed through request. +# +# (Actually, the latter is only true if you know the server configuration +# at the time the request was made!) + +__version__ = "0.6" + +__all__ = ["HTTPServer", "BaseHTTPRequestHandler"] + +from future.backports import html +from future.backports.http import client as http_client +from future.backports.urllib import parse as urllib_parse +from future.backports import socketserver + +import io +import mimetypes +import os +import posixpath +import select +import shutil +import socket # For gethostbyaddr() +import sys +import time +import copy +import argparse + + +# Default error message template +DEFAULT_ERROR_MESSAGE = """\ + + + + + Error response + + +

Error response

+

Error code: %(code)d

+

Message: %(message)s.

+

Error code explanation: %(code)s - %(explain)s.

+ + +""" + +DEFAULT_ERROR_CONTENT_TYPE = "text/html;charset=utf-8" + +def _quote_html(html): + return html.replace("&", "&").replace("<", "<").replace(">", ">") + +class HTTPServer(socketserver.TCPServer): + + allow_reuse_address = 1 # Seems to make sense in testing environment + + def server_bind(self): + """Override server_bind to store the server name.""" + socketserver.TCPServer.server_bind(self) + host, port = self.socket.getsockname()[:2] + self.server_name = socket.getfqdn(host) + self.server_port = port + + +class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): + + """HTTP request handler base class. + + The following explanation of HTTP serves to guide you through the + code as well as to expose any misunderstandings I may have about + HTTP (so you don't need to read the code to figure out I'm wrong + :-). + + HTTP (HyperText Transfer Protocol) is an extensible protocol on + top of a reliable stream transport (e.g. TCP/IP). The protocol + recognizes three parts to a request: + + 1. One line identifying the request type and path + 2. An optional set of RFC-822-style headers + 3. An optional data part + + The headers and data are separated by a blank line. + + The first line of the request has the form + + + + where is a (case-sensitive) keyword such as GET or POST, + is a string containing path information for the request, + and should be the string "HTTP/1.0" or "HTTP/1.1". + is encoded using the URL encoding scheme (using %xx to signify + the ASCII character with hex code xx). + + The specification specifies that lines are separated by CRLF but + for compatibility with the widest range of clients recommends + servers also handle LF. Similarly, whitespace in the request line + is treated sensibly (allowing multiple spaces between components + and allowing trailing whitespace). + + Similarly, for output, lines ought to be separated by CRLF pairs + but most clients grok LF characters just fine. 
+ + If the first line of the request has the form + + + + (i.e. is left out) then this is assumed to be an HTTP + 0.9 request; this form has no optional headers and data part and + the reply consists of just the data. + + The reply form of the HTTP 1.x protocol again has three parts: + + 1. One line giving the response code + 2. An optional set of RFC-822-style headers + 3. The data + + Again, the headers and data are separated by a blank line. + + The response code line has the form + + + + where is the protocol version ("HTTP/1.0" or "HTTP/1.1"), + is a 3-digit response code indicating success or + failure of the request, and is an optional + human-readable string explaining what the response code means. + + This server parses the request and the headers, and then calls a + function specific to the request type (). Specifically, + a request SPAM will be handled by a method do_SPAM(). If no + such method exists the server sends an error response to the + client. If it exists, it is called with no arguments: + + do_SPAM() + + Note that the request name is case sensitive (i.e. SPAM and spam + are different requests). + + The various request details are stored in instance variables: + + - client_address is the client IP address in the form (host, + port); + + - command, path and version are the broken-down request line; + + - headers is an instance of email.message.Message (or a derived + class) containing the header information; + + - rfile is a file object open for reading positioned at the + start of the optional input data part; + + - wfile is a file object open for writing. + + IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! + + The first thing to be written must be the response line. Then + follow 0 or more header lines, then a blank line, and then the + actual data (if any). 
The meaning of the header lines depends on + the command executed by the server; in most cases, when data is + returned, there should be at least one header line of the form + + Content-type: / + + where and should be registered MIME types, + e.g. "text/html" or "text/plain". + + """ + + # The Python system version, truncated to its first component. + sys_version = "Python/" + sys.version.split()[0] + + # The server software version. You may want to override this. + # The format is multiple whitespace-separated strings, + # where each string is of the form name[/version]. + server_version = "BaseHTTP/" + __version__ + + error_message_format = DEFAULT_ERROR_MESSAGE + error_content_type = DEFAULT_ERROR_CONTENT_TYPE + + # The default request version. This only affects responses up until + # the point where the request line is parsed, so it mainly decides what + # the client gets back when sending a malformed request line. + # Most web servers default to HTTP 0.9, i.e. don't send a status line. + default_request_version = "HTTP/0.9" + + def parse_request(self): + """Parse a request (internal). + + The request should be stored in self.raw_requestline; the results + are in self.command, self.path, self.request_version and + self.headers. + + Return True for success, False for failure; on failure, an + error is sent back. + + """ + self.command = None # set in case of error on the first line + self.request_version = version = self.default_request_version + self.close_connection = 1 + requestline = str(self.raw_requestline, 'iso-8859-1') + requestline = requestline.rstrip('\r\n') + self.requestline = requestline + words = requestline.split() + if len(words) == 3: + command, path, version = words + if version[:5] != 'HTTP/': + self.send_error(400, "Bad request version (%r)" % version) + return False + try: + base_version_number = version.split('/', 1)[1] + version_number = base_version_number.split(".") + # RFC 2145 section 3.1 says there can be only one "." 
and + # - major and minor numbers MUST be treated as + # separate integers; + # - HTTP/2.4 is a lower version than HTTP/2.13, which in + # turn is lower than HTTP/12.3; + # - Leading zeros MUST be ignored by recipients. + if len(version_number) != 2: + raise ValueError + version_number = int(version_number[0]), int(version_number[1]) + except (ValueError, IndexError): + self.send_error(400, "Bad request version (%r)" % version) + return False + if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1": + self.close_connection = 0 + if version_number >= (2, 0): + self.send_error(505, + "Invalid HTTP Version (%s)" % base_version_number) + return False + elif len(words) == 2: + command, path = words + self.close_connection = 1 + if command != 'GET': + self.send_error(400, + "Bad HTTP/0.9 request type (%r)" % command) + return False + elif not words: + return False + else: + self.send_error(400, "Bad request syntax (%r)" % requestline) + return False + self.command, self.path, self.request_version = command, path, version + + # Examine the headers and look for a Connection directive. + try: + self.headers = http_client.parse_headers(self.rfile, + _class=self.MessageClass) + except http_client.LineTooLong: + self.send_error(400, "Line too long") + return False + + conntype = self.headers.get('Connection', "") + if conntype.lower() == 'close': + self.close_connection = 1 + elif (conntype.lower() == 'keep-alive' and + self.protocol_version >= "HTTP/1.1"): + self.close_connection = 0 + # Examine the headers and look for an Expect directive + expect = self.headers.get('Expect', "") + if (expect.lower() == "100-continue" and + self.protocol_version >= "HTTP/1.1" and + self.request_version >= "HTTP/1.1"): + if not self.handle_expect_100(): + return False + return True + + def handle_expect_100(self): + """Decide what to do with an "Expect: 100-continue" header. 
+ + If the client is expecting a 100 Continue response, we must + respond with either a 100 Continue or a final response before + waiting for the request body. The default is to always respond + with a 100 Continue. You can behave differently (for example, + reject unauthorized requests) by overriding this method. + + This method should either return True (possibly after sending + a 100 Continue response) or send an error response and return + False. + + """ + self.send_response_only(100) + self.flush_headers() + return True + + def handle_one_request(self): + """Handle a single HTTP request. + + You normally don't need to override this method; see the class + __doc__ string for information on how to handle specific HTTP + commands such as GET and POST. + + """ + try: + self.raw_requestline = self.rfile.readline(65537) + if len(self.raw_requestline) > 65536: + self.requestline = '' + self.request_version = '' + self.command = '' + self.send_error(414) + return + if not self.raw_requestline: + self.close_connection = 1 + return + if not self.parse_request(): + # An error code has been sent, just exit + return + mname = 'do_' + self.command + if not hasattr(self, mname): + self.send_error(501, "Unsupported method (%r)" % self.command) + return + method = getattr(self, mname) + method() + self.wfile.flush() #actually send the response if not already done. + except socket.timeout as e: + #a read or a write timed out. Discard this connection + self.log_error("Request timed out: %r", e) + self.close_connection = 1 + return + + def handle(self): + """Handle multiple requests if necessary.""" + self.close_connection = 1 + + self.handle_one_request() + while not self.close_connection: + self.handle_one_request() + + def send_error(self, code, message=None): + """Send and log an error reply. + + Arguments are the error code, and a detailed message. + The detailed message defaults to the short entry matching the + response code. 
+ + This sends an error response (so it must be called before any + output has been generated), logs the error, and finally sends + a piece of HTML explaining the error to the user. + + """ + + try: + shortmsg, longmsg = self.responses[code] + except KeyError: + shortmsg, longmsg = '???', '???' + if message is None: + message = shortmsg + explain = longmsg + self.log_error("code %d, message %s", code, message) + # using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201) + content = (self.error_message_format % + {'code': code, 'message': _quote_html(message), 'explain': explain}) + self.send_response(code, message) + self.send_header("Content-Type", self.error_content_type) + self.send_header('Connection', 'close') + self.end_headers() + if self.command != 'HEAD' and code >= 200 and code not in (204, 304): + self.wfile.write(content.encode('UTF-8', 'replace')) + + def send_response(self, code, message=None): + """Add the response header to the headers buffer and log the + response code. + + Also send two standard headers with the server software + version and the current date. 
+ + """ + self.log_request(code) + self.send_response_only(code, message) + self.send_header('Server', self.version_string()) + self.send_header('Date', self.date_time_string()) + + def send_response_only(self, code, message=None): + """Send the response header only.""" + if message is None: + if code in self.responses: + message = self.responses[code][0] + else: + message = '' + if self.request_version != 'HTTP/0.9': + if not hasattr(self, '_headers_buffer'): + self._headers_buffer = [] + self._headers_buffer.append(("%s %d %s\r\n" % + (self.protocol_version, code, message)).encode( + 'latin-1', 'strict')) + + def send_header(self, keyword, value): + """Send a MIME header to the headers buffer.""" + if self.request_version != 'HTTP/0.9': + if not hasattr(self, '_headers_buffer'): + self._headers_buffer = [] + self._headers_buffer.append( + ("%s: %s\r\n" % (keyword, value)).encode('latin-1', 'strict')) + + if keyword.lower() == 'connection': + if value.lower() == 'close': + self.close_connection = 1 + elif value.lower() == 'keep-alive': + self.close_connection = 0 + + def end_headers(self): + """Send the blank line ending the MIME headers.""" + if self.request_version != 'HTTP/0.9': + self._headers_buffer.append(b"\r\n") + self.flush_headers() + + def flush_headers(self): + if hasattr(self, '_headers_buffer'): + self.wfile.write(b"".join(self._headers_buffer)) + self._headers_buffer = [] + + def log_request(self, code='-', size='-'): + """Log an accepted request. + + This is called by send_response(). + + """ + + self.log_message('"%s" %s %s', + self.requestline, str(code), str(size)) + + def log_error(self, format, *args): + """Log an error. + + This is called when a request cannot be fulfilled. By + default it passes the message on to log_message(). + + Arguments are the same as for log_message(). + + XXX This should go to the separate error log. + + """ + + self.log_message(format, *args) + + def log_message(self, format, *args): + """Log an arbitrary message. 
+ + This is used by all other logging functions. Override + it if you have specific logging wishes. + + The first argument, FORMAT, is a format string for the + message to be logged. If the format string contains + any % escapes requiring parameters, they should be + specified as subsequent arguments (it's just like + printf!). + + The client ip and current date/time are prefixed to + every message. + + """ + + sys.stderr.write("%s - - [%s] %s\n" % + (self.address_string(), + self.log_date_time_string(), + format%args)) + + def version_string(self): + """Return the server software version string.""" + return self.server_version + ' ' + self.sys_version + + def date_time_string(self, timestamp=None): + """Return the current date and time formatted for a message header.""" + if timestamp is None: + timestamp = time.time() + year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp) + s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + self.weekdayname[wd], + day, self.monthname[month], year, + hh, mm, ss) + return s + + def log_date_time_string(self): + """Return the current time formatted for logging.""" + now = time.time() + year, month, day, hh, mm, ss, x, y, z = time.localtime(now) + s = "%02d/%3s/%04d %02d:%02d:%02d" % ( + day, self.monthname[month], year, hh, mm, ss) + return s + + weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] + + monthname = [None, + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] + + def address_string(self): + """Return the client address.""" + + return self.client_address[0] + + # Essentially static class variables + + # The version of the HTTP protocol we support. + # Set this to HTTP/1.1 to enable automatic keepalive + protocol_version = "HTTP/1.0" + + # MessageClass used to parse headers + MessageClass = http_client.HTTPMessage + + # Table mapping response codes to messages; entries have the + # form {code: (shortmessage, longmessage)}. + # See RFC 2616 and 6585. 
+ responses = { + 100: ('Continue', 'Request received, please continue'), + 101: ('Switching Protocols', + 'Switching to new protocol; obey Upgrade header'), + + 200: ('OK', 'Request fulfilled, document follows'), + 201: ('Created', 'Document created, URL follows'), + 202: ('Accepted', + 'Request accepted, processing continues off-line'), + 203: ('Non-Authoritative Information', 'Request fulfilled from cache'), + 204: ('No Content', 'Request fulfilled, nothing follows'), + 205: ('Reset Content', 'Clear input form for further input.'), + 206: ('Partial Content', 'Partial content follows.'), + + 300: ('Multiple Choices', + 'Object has several resources -- see URI list'), + 301: ('Moved Permanently', 'Object moved permanently -- see URI list'), + 302: ('Found', 'Object moved temporarily -- see URI list'), + 303: ('See Other', 'Object moved -- see Method and URL list'), + 304: ('Not Modified', + 'Document has not changed since given time'), + 305: ('Use Proxy', + 'You must use proxy specified in Location to access this ' + 'resource.'), + 307: ('Temporary Redirect', + 'Object moved temporarily -- see URI list'), + + 400: ('Bad Request', + 'Bad request syntax or unsupported method'), + 401: ('Unauthorized', + 'No permission -- see authorization schemes'), + 402: ('Payment Required', + 'No payment -- see charging schemes'), + 403: ('Forbidden', + 'Request forbidden -- authorization will not help'), + 404: ('Not Found', 'Nothing matches the given URI'), + 405: ('Method Not Allowed', + 'Specified method is invalid for this resource.'), + 406: ('Not Acceptable', 'URI not available in preferred format.'), + 407: ('Proxy Authentication Required', 'You must authenticate with ' + 'this proxy before proceeding.'), + 408: ('Request Timeout', 'Request timed out; try again later.'), + 409: ('Conflict', 'Request conflict.'), + 410: ('Gone', + 'URI no longer exists and has been permanently removed.'), + 411: ('Length Required', 'Client must specify Content-Length.'), + 412: 
('Precondition Failed', 'Precondition in headers is false.'), + 413: ('Request Entity Too Large', 'Entity is too large.'), + 414: ('Request-URI Too Long', 'URI is too long.'), + 415: ('Unsupported Media Type', 'Entity body in unsupported format.'), + 416: ('Requested Range Not Satisfiable', + 'Cannot satisfy request range.'), + 417: ('Expectation Failed', + 'Expect condition could not be satisfied.'), + 428: ('Precondition Required', + 'The origin server requires the request to be conditional.'), + 429: ('Too Many Requests', 'The user has sent too many requests ' + 'in a given amount of time ("rate limiting").'), + 431: ('Request Header Fields Too Large', 'The server is unwilling to ' + 'process the request because its header fields are too large.'), + + 500: ('Internal Server Error', 'Server got itself in trouble'), + 501: ('Not Implemented', + 'Server does not support this operation'), + 502: ('Bad Gateway', 'Invalid responses from another server/proxy.'), + 503: ('Service Unavailable', + 'The server cannot process the request due to a high load'), + 504: ('Gateway Timeout', + 'The gateway server did not receive a timely response'), + 505: ('HTTP Version Not Supported', 'Cannot fulfill request.'), + 511: ('Network Authentication Required', + 'The client needs to authenticate to gain network access.'), + } + + +class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): + + """Simple HTTP request handler with GET and HEAD commands. + + This serves files from the current directory and any of its + subdirectories. The MIME type for files is determined by + calling the .guess_type() method. + + The GET and HEAD requests are identical except that the HEAD + request omits the actual contents of the file. 
+ + """ + + server_version = "SimpleHTTP/" + __version__ + + def do_GET(self): + """Serve a GET request.""" + f = self.send_head() + if f: + self.copyfile(f, self.wfile) + f.close() + + def do_HEAD(self): + """Serve a HEAD request.""" + f = self.send_head() + if f: + f.close() + + def send_head(self): + """Common code for GET and HEAD commands. + + This sends the response code and MIME headers. + + Return value is either a file object (which has to be copied + to the outputfile by the caller unless the command was HEAD, + and must be closed by the caller under all circumstances), or + None, in which case the caller has nothing further to do. + + """ + path = self.translate_path(self.path) + f = None + if os.path.isdir(path): + if not self.path.endswith('/'): + # redirect browser - doing basically what apache does + self.send_response(301) + self.send_header("Location", self.path + "/") + self.end_headers() + return None + for index in "index.html", "index.htm": + index = os.path.join(path, index) + if os.path.exists(index): + path = index + break + else: + return self.list_directory(path) + ctype = self.guess_type(path) + try: + f = open(path, 'rb') + except IOError: + self.send_error(404, "File not found") + return None + self.send_response(200) + self.send_header("Content-type", ctype) + fs = os.fstat(f.fileno()) + self.send_header("Content-Length", str(fs[6])) + self.send_header("Last-Modified", self.date_time_string(fs.st_mtime)) + self.end_headers() + return f + + def list_directory(self, path): + """Helper to produce a directory listing (absent index.html). + + Return value is either a file object, or None (indicating an + error). In either case, the headers are sent, making the + interface the same as for send_head(). 
+ + """ + try: + list = os.listdir(path) + except os.error: + self.send_error(404, "No permission to list directory") + return None + list.sort(key=lambda a: a.lower()) + r = [] + displaypath = html.escape(urllib_parse.unquote(self.path)) + enc = sys.getfilesystemencoding() + title = 'Directory listing for %s' % displaypath + r.append('') + r.append('\n') + r.append('' % enc) + r.append('%s\n' % title) + r.append('\n

%s

' % title) + r.append('
\n
    ') + for name in list: + fullname = os.path.join(path, name) + displayname = linkname = name + # Append / for directories or @ for symbolic links + if os.path.isdir(fullname): + displayname = name + "/" + linkname = name + "/" + if os.path.islink(fullname): + displayname = name + "@" + # Note: a link to a directory displays with @ and links with / + r.append('
  • %s
  • ' + % (urllib_parse.quote(linkname), html.escape(displayname))) + # # Use this instead: + # r.append('
  • %s
  • ' + # % (urllib.quote(linkname), cgi.escape(displayname))) + r.append('
\n
\n\n\n') + encoded = '\n'.join(r).encode(enc) + f = io.BytesIO() + f.write(encoded) + f.seek(0) + self.send_response(200) + self.send_header("Content-type", "text/html; charset=%s" % enc) + self.send_header("Content-Length", str(len(encoded))) + self.end_headers() + return f + + def translate_path(self, path): + """Translate a /-separated PATH to the local filename syntax. + + Components that mean special things to the local file system + (e.g. drive or directory names) are ignored. (XXX They should + probably be diagnosed.) + + """ + # abandon query parameters + path = path.split('?',1)[0] + path = path.split('#',1)[0] + path = posixpath.normpath(urllib_parse.unquote(path)) + words = path.split('/') + words = filter(None, words) + path = os.getcwd() + for word in words: + drive, word = os.path.splitdrive(word) + head, word = os.path.split(word) + if word in (os.curdir, os.pardir): continue + path = os.path.join(path, word) + return path + + def copyfile(self, source, outputfile): + """Copy all data between two file objects. + + The SOURCE argument is a file object open for reading + (or anything with a read() method) and the DESTINATION + argument is a file object open for writing (or + anything with a write() method). + + The only reason for overriding this would be to change + the block size or perhaps to replace newlines by CRLF + -- note however that this the default server uses this + to copy binary data as well. + + """ + shutil.copyfileobj(source, outputfile) + + def guess_type(self, path): + """Guess the type of a file. + + Argument is a PATH (a filename). + + Return value is a string of the form type/subtype, + usable for a MIME Content-type header. + + The default implementation looks the file's extension + up in the table self.extensions_map, using application/octet-stream + as a default; however it would be permissible (if + slow) to look inside the data to make a better guess. 
+ + """ + + base, ext = posixpath.splitext(path) + if ext in self.extensions_map: + return self.extensions_map[ext] + ext = ext.lower() + if ext in self.extensions_map: + return self.extensions_map[ext] + else: + return self.extensions_map[''] + + if not mimetypes.inited: + mimetypes.init() # try to read system mime.types + extensions_map = mimetypes.types_map.copy() + extensions_map.update({ + '': 'application/octet-stream', # Default + '.py': 'text/plain', + '.c': 'text/plain', + '.h': 'text/plain', + }) + + +# Utilities for CGIHTTPRequestHandler + +def _url_collapse_path(path): + """ + Given a URL path, remove extra '/'s and '.' path elements and collapse + any '..' references and returns a colllapsed path. + + Implements something akin to RFC-2396 5.2 step 6 to parse relative paths. + The utility of this function is limited to is_cgi method and helps + preventing some security attacks. + + Returns: A tuple of (head, tail) where tail is everything after the final / + and head is everything before it. Head will always start with a '/' and, + if it contains anything else, never have a trailing '/'. + + Raises: IndexError if too many '..' occur within the path. + + """ + # Similar to os.path.split(os.path.normpath(path)) but specific to URL + # path semantics rather than local operating system semantics. + path_parts = path.split('/') + head_parts = [] + for part in path_parts[:-1]: + if part == '..': + head_parts.pop() # IndexError if more '..' 
than prior parts + elif part and part != '.': + head_parts.append( part ) + if path_parts: + tail_part = path_parts.pop() + if tail_part: + if tail_part == '..': + head_parts.pop() + tail_part = '' + elif tail_part == '.': + tail_part = '' + else: + tail_part = '' + + splitpath = ('/' + '/'.join(head_parts), tail_part) + collapsed_path = "/".join(splitpath) + + return collapsed_path + + + +nobody = None + +def nobody_uid(): + """Internal routine to get nobody's uid""" + global nobody + if nobody: + return nobody + try: + import pwd + except ImportError: + return -1 + try: + nobody = pwd.getpwnam('nobody')[2] + except KeyError: + nobody = 1 + max(x[2] for x in pwd.getpwall()) + return nobody + + +def executable(path): + """Test for executable file.""" + return os.access(path, os.X_OK) + + +class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): + + """Complete HTTP server with GET, HEAD and POST commands. + + GET and HEAD also support running CGI scripts. + + The POST command is *only* implemented for CGI scripts. + + """ + + # Determine platform specifics + have_fork = hasattr(os, 'fork') + + # Make rfile unbuffered -- we need to read one line and then pass + # the rest to a subprocess, so we can't use buffered input. + rbufsize = 0 + + def do_POST(self): + """Serve a POST request. + + This is only implemented for CGI scripts. + + """ + + if self.is_cgi(): + self.run_cgi() + else: + self.send_error(501, "Can only POST to CGI scripts") + + def send_head(self): + """Version of send_head that support CGI scripts""" + if self.is_cgi(): + return self.run_cgi() + else: + return SimpleHTTPRequestHandler.send_head(self) + + def is_cgi(self): + """Test whether self.path corresponds to a CGI script. + + Returns True and updates the cgi_info attribute to the tuple + (dir, rest) if self.path requires running a CGI script. + Returns False otherwise. + + If any exception is raised, the caller should assume that + self.path was rejected as invalid and act accordingly. 
+ + The default implementation tests whether the normalized url + path begins with one of the strings in self.cgi_directories + (and the next character is a '/' or the end of the string). + + """ + collapsed_path = _url_collapse_path(self.path) + dir_sep = collapsed_path.find('/', 1) + head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:] + if head in self.cgi_directories: + self.cgi_info = head, tail + return True + return False + + + cgi_directories = ['/cgi-bin', '/htbin'] + + def is_executable(self, path): + """Test whether argument path is an executable file.""" + return executable(path) + + def is_python(self, path): + """Test whether argument path is a Python script.""" + head, tail = os.path.splitext(path) + return tail.lower() in (".py", ".pyw") + + def run_cgi(self): + """Execute a CGI script.""" + path = self.path + dir, rest = self.cgi_info + + i = path.find('/', len(dir) + 1) + while i >= 0: + nextdir = path[:i] + nextrest = path[i+1:] + + scriptdir = self.translate_path(nextdir) + if os.path.isdir(scriptdir): + dir, rest = nextdir, nextrest + i = path.find('/', len(dir) + 1) + else: + break + + # find an explicit query string, if present. + i = rest.rfind('?') + if i >= 0: + rest, query = rest[:i], rest[i+1:] + else: + query = '' + + # dissect the part after the directory name into a script name & + # a possible additional path, to be stored in PATH_INFO. 
+ i = rest.find('/') + if i >= 0: + script, rest = rest[:i], rest[i:] + else: + script, rest = rest, '' + + scriptname = dir + '/' + script + scriptfile = self.translate_path(scriptname) + if not os.path.exists(scriptfile): + self.send_error(404, "No such CGI script (%r)" % scriptname) + return + if not os.path.isfile(scriptfile): + self.send_error(403, "CGI script is not a plain file (%r)" % + scriptname) + return + ispy = self.is_python(scriptname) + if self.have_fork or not ispy: + if not self.is_executable(scriptfile): + self.send_error(403, "CGI script is not executable (%r)" % + scriptname) + return + + # Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html + # XXX Much of the following could be prepared ahead of time! + env = copy.deepcopy(os.environ) + env['SERVER_SOFTWARE'] = self.version_string() + env['SERVER_NAME'] = self.server.server_name + env['GATEWAY_INTERFACE'] = 'CGI/1.1' + env['SERVER_PROTOCOL'] = self.protocol_version + env['SERVER_PORT'] = str(self.server.server_port) + env['REQUEST_METHOD'] = self.command + uqrest = urllib_parse.unquote(rest) + env['PATH_INFO'] = uqrest + env['PATH_TRANSLATED'] = self.translate_path(uqrest) + env['SCRIPT_NAME'] = scriptname + if query: + env['QUERY_STRING'] = query + env['REMOTE_ADDR'] = self.client_address[0] + authorization = self.headers.get("authorization") + if authorization: + authorization = authorization.split() + if len(authorization) == 2: + import base64, binascii + env['AUTH_TYPE'] = authorization[0] + if authorization[0].lower() == "basic": + try: + authorization = authorization[1].encode('ascii') + if utils.PY3: + # In Py3.3, was: + authorization = base64.decodebytes(authorization).\ + decode('ascii') + else: + # Backport to Py2.7: + authorization = base64.decodestring(authorization).\ + decode('ascii') + except (binascii.Error, UnicodeError): + pass + else: + authorization = authorization.split(':') + if len(authorization) == 2: + env['REMOTE_USER'] = authorization[0] + # XXX REMOTE_IDENT + if 
self.headers.get('content-type') is None: + env['CONTENT_TYPE'] = self.headers.get_content_type() + else: + env['CONTENT_TYPE'] = self.headers['content-type'] + length = self.headers.get('content-length') + if length: + env['CONTENT_LENGTH'] = length + referer = self.headers.get('referer') + if referer: + env['HTTP_REFERER'] = referer + accept = [] + for line in self.headers.getallmatchingheaders('accept'): + if line[:1] in "\t\n\r ": + accept.append(line.strip()) + else: + accept = accept + line[7:].split(',') + env['HTTP_ACCEPT'] = ','.join(accept) + ua = self.headers.get('user-agent') + if ua: + env['HTTP_USER_AGENT'] = ua + co = filter(None, self.headers.get_all('cookie', [])) + cookie_str = ', '.join(co) + if cookie_str: + env['HTTP_COOKIE'] = cookie_str + # XXX Other HTTP_* headers + # Since we're setting the env in the parent, provide empty + # values to override previously set values + for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH', + 'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'): + env.setdefault(k, "") + + self.send_response(200, "Script output follows") + self.flush_headers() + + decoded_query = query.replace('+', ' ') + + if self.have_fork: + # Unix -- fork as we should + args = [script] + if '=' not in decoded_query: + args.append(decoded_query) + nobody = nobody_uid() + self.wfile.flush() # Always flush before forking + pid = os.fork() + if pid != 0: + # Parent + pid, sts = os.waitpid(pid, 0) + # throw away additional data [see bug #427345] + while select.select([self.rfile], [], [], 0)[0]: + if not self.rfile.read(1): + break + if sts: + self.log_error("CGI script exit status %#x", sts) + return + # Child + try: + try: + os.setuid(nobody) + except os.error: + pass + os.dup2(self.rfile.fileno(), 0) + os.dup2(self.wfile.fileno(), 1) + os.execve(scriptfile, args, env) + except: + self.server.handle_error(self.request, self.client_address) + os._exit(127) + + else: + # Non-Unix -- use subprocess + import subprocess + cmdline = [scriptfile] 
+ if self.is_python(scriptfile): + interp = sys.executable + if interp.lower().endswith("w.exe"): + # On Windows, use python.exe, not pythonw.exe + interp = interp[:-5] + interp[-4:] + cmdline = [interp, '-u'] + cmdline + if '=' not in query: + cmdline.append(query) + self.log_message("command: %s", subprocess.list2cmdline(cmdline)) + try: + nbytes = int(length) + except (TypeError, ValueError): + nbytes = 0 + p = subprocess.Popen(cmdline, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env = env + ) + if self.command.lower() == "post" and nbytes > 0: + data = self.rfile.read(nbytes) + else: + data = None + # throw away additional data [see bug #427345] + while select.select([self.rfile._sock], [], [], 0)[0]: + if not self.rfile._sock.recv(1): + break + stdout, stderr = p.communicate(data) + self.wfile.write(stdout) + if stderr: + self.log_error('%s', stderr) + p.stderr.close() + p.stdout.close() + status = p.returncode + if status: + self.log_error("CGI script exit status %#x", status) + else: + self.log_message("CGI script exited OK") + + +def test(HandlerClass = BaseHTTPRequestHandler, + ServerClass = HTTPServer, protocol="HTTP/1.0", port=8000): + """Test the HTTP request handler class. + + This runs an HTTP server on port 8000 (or the first command line + argument). 
+ + """ + server_address = ('', port) + + HandlerClass.protocol_version = protocol + httpd = ServerClass(server_address, HandlerClass) + + sa = httpd.socket.getsockname() + print("Serving HTTP on", sa[0], "port", sa[1], "...") + try: + httpd.serve_forever() + except KeyboardInterrupt: + print("\nKeyboard interrupt received, exiting.") + httpd.server_close() + sys.exit(0) + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--cgi', action='store_true', + help='Run as CGI Server') + parser.add_argument('port', action='store', + default=8000, type=int, + nargs='?', + help='Specify alternate port [default: 8000]') + args = parser.parse_args() + if args.cgi: + test(HandlerClass=CGIHTTPRequestHandler, port=args.port) + else: + test(HandlerClass=SimpleHTTPRequestHandler, port=args.port) diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/misc.py b/.install/.kodi/addons/script.module.future/libs/future/backports/misc.py new file mode 100644 index 000000000..ef7520788 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/misc.py @@ -0,0 +1,940 @@ +""" +Miscellaneous function (re)definitions from the Py3.4+ standard library +for Python 2.6/2.7. 
+ +- math.ceil (for Python 2.7) +- collections.OrderedDict (for Python 2.6) +- collections.Counter (for Python 2.6) +- collections.ChainMap (for all versions prior to Python 3.3) +- itertools.count (for Python 2.6, with step parameter) +- subprocess.check_output (for Python 2.6) +- reprlib.recursive_repr (for Python 2.6+) +- functools.cmp_to_key (for Python 2.6) +""" + +from __future__ import absolute_import + +import subprocess +from math import ceil as oldceil +from collections import Mapping, MutableMapping + +from operator import itemgetter as _itemgetter, eq as _eq +import sys +import heapq as _heapq +from _weakref import proxy as _proxy +from itertools import repeat as _repeat, chain as _chain, starmap as _starmap +from socket import getaddrinfo, SOCK_STREAM, error, socket + +from future.utils import iteritems, itervalues, PY26, PY3 + + +def ceil(x): + """ + Return the ceiling of x as an int. + This is the smallest integral value >= x. + """ + return int(oldceil(x)) + + +######################################################################## +### reprlib.recursive_repr decorator from Py3.4 +######################################################################## + +from itertools import islice + +if PY3: + try: + from _thread import get_ident + except ImportError: + from _dummy_thread import get_ident +else: + try: + from thread import get_ident + except ImportError: + from dummy_thread import get_ident + + +def recursive_repr(fillvalue='...'): + 'Decorator to make a repr function return fillvalue for a recursive call' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = 
getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + return wrapper + + return decorating_function + + +################################################################################ +### OrderedDict +################################################################################ + +class _Link(object): + __slots__ = 'prev', 'next', 'key', '__weakref__' + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as regular dictionaries. + + # The internal self.__map dict maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # The sentinel is in self.__hardroot with a weakref proxy in self.__root. + # The prev links are weakref proxies (to prevent circular references). + # Individual links are kept alive by the hard reference in self.__map. + # Those hard references disappear when a key is deleted from an OrderedDict. + + def __init__(*args, **kwds): + '''Initialize an ordered dictionary. The signature is the same as + regular dictionaries, but keyword arguments are not recommended because + their insertion order is arbitrary. 
+ + ''' + if not args: + raise TypeError("descriptor '__init__' of 'OrderedDict' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__hardroot = _Link() + self.__root = root = _proxy(self.__hardroot) + root.prev = root.next = root + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, + dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link at the end of the linked list, + # and the inherited dictionary is updated with the new key/value pair. + if key not in self: + self.__map[key] = link = Link() + root = self.__root + last = root.prev + link.prev, link.next, link.key = last, root, key + last.next = link + root.prev = proxy(link) + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which gets + # removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link = self.__map.pop(key) + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + # Traverse the linked list in order. + root = self.__root + curr = root.next + while curr is not root: + yield curr.key + curr = curr.next + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + # Traverse the linked list in reverse order. + root = self.__root + curr = root.prev + while curr is not root: + yield curr.key + curr = curr.prev + + def clear(self): + 'od.clear() -> None. Remove all items from od.' 
+ root = self.__root + root.prev = root.next = root + self.__map.clear() + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root.prev + link_prev = link.prev + link_prev.next = root + root.prev = link_prev + else: + link = root.next + link_next = link.next + root.next = link_next + link_next.prev = root + key = link.key + del self.__map[key] + value = dict.pop(self, key) + return key, value + + def move_to_end(self, key, last=True): + '''Move an existing element to the end (or beginning if last==False). + + Raises KeyError if the element does not exist. + When last=True, acts like a fast version of self[key]=self.pop(key). + + ''' + link = self.__map[key] + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + root = self.__root + if last: + last = root.prev + link.prev = last + link.next = root + last.next = root.prev = link + else: + first = root.next + link.prev = root + link.next = first + root.next = first.prev = link + + def __sizeof__(self): + sizeof = sys.getsizeof + n = len(self) + 1 # number of links including root + size = sizeof(self.__dict__) # instance dictionary + size += sizeof(self.__map) * 2 # internal dict and inherited dict + size += sizeof(self.__hardroot) * n # link objects + size += sizeof(self.__root) * n # proxy objects + return size + + update = __update = MutableMapping.update + keys = MutableMapping.keys + values = MutableMapping.values + items = MutableMapping.items + __ne__ = MutableMapping.__ne__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding + value. If key is not found, d is returned if given, otherwise KeyError + is raised. 
+ + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + @recursive_repr() + def __repr__(self): + 'od.__repr__() <==> repr(od)' + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self.items())) + + def __reduce__(self): + 'Return state information for pickling' + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + return self.__class__, (), inst_dict or None, None, iter(self.items()) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. + If not specified, the value defaults to None. + + ''' + self = cls() + for key in iterable: + self[key] = value + return self + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return dict.__eq__(self, other) and all(map(_eq, self, other)) + return dict.__eq__(self, other) + + +# {{{ http://code.activestate.com/recipes/576611/ (r11) + +try: + from operator import itemgetter + from heapq import nlargest +except ImportError: + pass + +######################################################################## +### Counter +######################################################################## + +def _count_elements(mapping, iterable): + 'Tally elements from the iterable.' + mapping_get = mapping.get + for elem in iterable: + mapping[elem] = mapping_get(elem, 0) + 1 + +class Counter(dict): + '''Dict subclass for counting hashable items. 
Sometimes called a bag + or multiset. Elements are stored as dictionary keys and their counts + are stored as dictionary values. + + >>> c = Counter('abcdeabcdabcaba') # count elements from a string + + >>> c.most_common(3) # three most common elements + [('a', 5), ('b', 4), ('c', 3)] + >>> sorted(c) # list all unique elements + ['a', 'b', 'c', 'd', 'e'] + >>> ''.join(sorted(c.elements())) # list elements with repetitions + 'aaaaabbbbcccdde' + >>> sum(c.values()) # total of all counts + 15 + + >>> c['a'] # count of letter 'a' + 5 + >>> for elem in 'shazam': # update counts from an iterable + ... c[elem] += 1 # by adding 1 to each element's count + >>> c['a'] # now there are seven 'a' + 7 + >>> del c['b'] # remove all 'b' + >>> c['b'] # now there are zero 'b' + 0 + + >>> d = Counter('simsalabim') # make another counter + >>> c.update(d) # add in the second counter + >>> c['a'] # now there are nine 'a' + 9 + + >>> c.clear() # empty the counter + >>> c + Counter() + + Note: If a count is set to zero or reduced to zero, it will remain + in the counter until the entry is deleted or the counter is cleared: + + >>> c = Counter('aaabbc') + >>> c['b'] -= 2 # reduce the count of 'b' by two + >>> c.most_common() # 'b' is still in, but its count is zero + [('a', 3), ('c', 1), ('b', 0)] + + ''' + # References: + # http://en.wikipedia.org/wiki/Multiset + # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html + # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm + # http://code.activestate.com/recipes/259174/ + # Knuth, TAOCP Vol. II section 4.6.3 + + def __init__(*args, **kwds): + '''Create a new, empty Counter object. And if given, count elements + from an input iterable. Or, initialize the count from another mapping + of elements to their counts. 
+ + >>> c = Counter() # a new, empty counter + >>> c = Counter('gallahad') # a new counter from an iterable + >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping + >>> c = Counter(a=4, b=2) # a new counter from keyword args + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'Counter' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + super(Counter, self).__init__() + self.update(*args, **kwds) + + def __missing__(self, key): + 'The count of elements not in the Counter is zero.' + # Needed so that self[missing_item] does not raise KeyError + return 0 + + def most_common(self, n=None): + '''List the n most common elements and their counts from the most + common to the least. If n is None, then list all element counts. + + >>> Counter('abcdeabcdabcaba').most_common(3) + [('a', 5), ('b', 4), ('c', 3)] + + ''' + # Emulate Bag.sortedByCount from Smalltalk + if n is None: + return sorted(self.items(), key=_itemgetter(1), reverse=True) + return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) + + def elements(self): + '''Iterator over elements repeating each as many times as its count. + + >>> c = Counter('ABCABC') + >>> sorted(c.elements()) + ['A', 'A', 'B', 'B', 'C', 'C'] + + # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) + >>> product = 1 + >>> for factor in prime_factors.elements(): # loop over factors + ... product *= factor # and multiply them + >>> product + 1836 + + Note, if an element's count has been set to zero or is a negative + number, elements() will ignore it. + + ''' + # Emulate Bag.do from Smalltalk and Multiset.begin from C++. 
+ return _chain.from_iterable(_starmap(_repeat, self.items())) + + # Override dict methods where necessary + + @classmethod + def fromkeys(cls, iterable, v=None): + # There is no equivalent method for counters because setting v=1 + # means that no element can have a count greater than one. + raise NotImplementedError( + 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') + + def update(*args, **kwds): + '''Like dict.update() but add counts instead of replacing them. + + Source can be an iterable, a dictionary, or another Counter instance. + + >>> c = Counter('which') + >>> c.update('witch') # add elements from another iterable + >>> d = Counter('watch') + >>> c.update(d) # add elements from another counter + >>> c['h'] # four 'h' in which, witch, and watch + 4 + + ''' + # The regular dict.update() operation makes no sense here because the + # replace behavior results in the some of original untouched counts + # being mixed-in with all of the other counts for a mismash that + # doesn't have a straight-forward interpretation in most counting + # contexts. Instead, we implement straight-addition. Both the inputs + # and outputs are allowed to contain zero and negative counts. + + if not args: + raise TypeError("descriptor 'update' of 'Counter' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + if isinstance(iterable, Mapping): + if self: + self_get = self.get + for elem, count in iterable.items(): + self[elem] = count + self_get(elem, 0) + else: + super(Counter, self).update(iterable) # fast path when counter is empty + else: + _count_elements(self, iterable) + if kwds: + self.update(kwds) + + def subtract(*args, **kwds): + '''Like dict.update() but subtracts counts instead of replacing them. + Counts can be reduced below zero. 
Both the inputs and outputs are + allowed to contain zero and negative counts. + + Source can be an iterable, a dictionary, or another Counter instance. + + >>> c = Counter('which') + >>> c.subtract('witch') # subtract elements from another iterable + >>> c.subtract(Counter('watch')) # subtract elements from another counter + >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch + 0 + >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch + -1 + + ''' + if not args: + raise TypeError("descriptor 'subtract' of 'Counter' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + self_get = self.get + if isinstance(iterable, Mapping): + for elem, count in iterable.items(): + self[elem] = self_get(elem, 0) - count + else: + for elem in iterable: + self[elem] = self_get(elem, 0) - 1 + if kwds: + self.subtract(kwds) + + def copy(self): + 'Return a shallow copy.' + return self.__class__(self) + + def __reduce__(self): + return self.__class__, (dict(self),) + + def __delitem__(self, elem): + 'Like dict.__delitem__() but does not raise KeyError for missing values.' + if elem in self: + super(Counter, self).__delitem__(elem) + + def __repr__(self): + if not self: + return '%s()' % self.__class__.__name__ + try: + items = ', '.join(map('%r: %r'.__mod__, self.most_common())) + return '%s({%s})' % (self.__class__.__name__, items) + except TypeError: + # handle case where values are not orderable + return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) + + # Multiset-style mathematical operations discussed in: + # Knuth TAOCP Volume II section 4.6.3 exercise 19 + # and at http://en.wikipedia.org/wiki/Multiset + # + # Outputs guaranteed to only include positive counts. 
+ # + # To strip negative and zero counts, add-in an empty counter: + # c += Counter() + + def __add__(self, other): + '''Add counts from two counters. + + >>> Counter('abbb') + Counter('bcc') + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count + other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __sub__(self, other): + ''' Subtract count, but keep only results with positive counts. + + >>> Counter('abbbc') - Counter('bccd') + Counter({'b': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count - other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count < 0: + result[elem] = 0 - count + return result + + def __or__(self, other): + '''Union is the maximum of value in either of the input counters. + + >>> Counter('abbb') | Counter('bcc') + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = other_count if count < other_count else count + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __and__(self, other): + ''' Intersection is the minimum of corresponding counts. 
+ + >>> Counter('abbb') & Counter('bcc') + Counter({'b': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = count if count < other_count else other_count + if newcount > 0: + result[elem] = newcount + return result + + def __pos__(self): + 'Adds an empty counter, effectively stripping negative and zero counts' + return self + Counter() + + def __neg__(self): + '''Subtracts from an empty counter. Strips positive and zero counts, + and flips the sign on negative counts. + + ''' + return Counter() - self + + def _keep_positive(self): + '''Internal method to strip elements with a negative or zero count''' + nonpositive = [elem for elem, count in self.items() if not count > 0] + for elem in nonpositive: + del self[elem] + return self + + def __iadd__(self, other): + '''Inplace add from another counter, keeping only positive counts. + + >>> c = Counter('abbb') + >>> c += Counter('bcc') + >>> c + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] += count + return self._keep_positive() + + def __isub__(self, other): + '''Inplace subtract counter, but keep only results with positive counts. + + >>> c = Counter('abbbc') + >>> c -= Counter('bccd') + >>> c + Counter({'b': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] -= count + return self._keep_positive() + + def __ior__(self, other): + '''Inplace union is the maximum of value from either counter. + + >>> c = Counter('abbb') + >>> c |= Counter('bcc') + >>> c + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + for elem, other_count in other.items(): + count = self[elem] + if other_count > count: + self[elem] = other_count + return self._keep_positive() + + def __iand__(self, other): + '''Inplace intersection is the minimum of corresponding counts. 
+ + >>> c = Counter('abbb') + >>> c &= Counter('bcc') + >>> c + Counter({'b': 1}) + + ''' + for elem, count in self.items(): + other_count = other[elem] + if other_count < count: + self[elem] = other_count + return self._keep_positive() + + +def check_output(*popenargs, **kwargs): + """ + For Python 2.6 compatibility: see + http://stackoverflow.com/questions/4814970/ + """ + + if 'stdout' in kwargs: + raise ValueError('stdout argument not allowed, it will be overridden.') + process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) + output, unused_err = process.communicate() + retcode = process.poll() + if retcode: + cmd = kwargs.get("args") + if cmd is None: + cmd = popenargs[0] + raise subprocess.CalledProcessError(retcode, cmd) + return output + + +def count(start=0, step=1): + """ + ``itertools.count`` in Py 2.6 doesn't accept a step + parameter. This is an enhanced version of ``itertools.count`` + for Py2.6 equivalent to ``itertools.count`` in Python 2.7+. + """ + while True: + yield start + start += step + + +######################################################################## +### ChainMap (helper for configparser and string.Template) +### From the Py3.4 source code. See also: +### https://github.com/kkxue/Py2ChainMap/blob/master/py2chainmap.py +######################################################################## + +class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. 
+ + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + # Py2 compatibility: + __nonzero__ = __bool__ + + @recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self, m=None): # like Django's Context.push() + ''' + New ChainMap with a new map followed by all previous maps. If no + map is provided, an empty dict is used. + ''' + if m is None: + m = {} + return self.__class__(m, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError('Key not found in the first mapping: {0!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' 
+ try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {0!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + + +# Re-use the same sentinel as in the Python stdlib socket module: +from socket import _GLOBAL_DEFAULT_TIMEOUT +# Was: _GLOBAL_DEFAULT_TIMEOUT = object() + + +def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, + source_address=None): + """Backport of 3-argument create_connection() for Py2.6. + + Connect to *address* and return the socket object. + + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + An host of '' or port 0 tells the OS to use the default. 
+ """ + + host, port = address + err = None + for res in getaddrinfo(host, port, 0, SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket(af, socktype, proto) + if timeout is not _GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + + except error as _: + err = _ + if sock is not None: + sock.close() + + if err is not None: + raise err + else: + raise error("getaddrinfo returns an empty list") + +# Backport from Py2.7 for Py2.6: +def cmp_to_key(mycmp): + """Convert a cmp= function into a key= function""" + class K(object): + __slots__ = ['obj'] + def __init__(self, obj, *args): + self.obj = obj + def __lt__(self, other): + return mycmp(self.obj, other.obj) < 0 + def __gt__(self, other): + return mycmp(self.obj, other.obj) > 0 + def __eq__(self, other): + return mycmp(self.obj, other.obj) == 0 + def __le__(self, other): + return mycmp(self.obj, other.obj) <= 0 + def __ge__(self, other): + return mycmp(self.obj, other.obj) >= 0 + def __ne__(self, other): + return mycmp(self.obj, other.obj) != 0 + def __hash__(self): + raise TypeError('hash not implemented') + return K + +# Back up our definitions above in case they're useful +_OrderedDict = OrderedDict +_Counter = Counter +_check_output = check_output +_count = count +_ceil = ceil +__count_elements = _count_elements +_recursive_repr = recursive_repr +_ChainMap = ChainMap +_create_connection = create_connection +_cmp_to_key = cmp_to_key + +# Overwrite the definitions above with the usual ones +# from the standard library: +if sys.version_info >= (2, 7): + from collections import OrderedDict, Counter + from itertools import count + from functools import cmp_to_key + try: + from subprocess import check_output + except ImportError: + # Not available. 
This happens with Google App Engine: see issue #231 + pass + from socket import create_connection + +if sys.version_info >= (3, 0): + from math import ceil + from collections import _count_elements + +if sys.version_info >= (3, 3): + from reprlib import recursive_repr + from collections import ChainMap diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/socket.py b/.install/.kodi/addons/script.module.future/libs/future/backports/socket.py new file mode 100644 index 000000000..930e1dae6 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/socket.py @@ -0,0 +1,454 @@ +# Wrapper module for _socket, providing some additional facilities +# implemented in Python. + +"""\ +This module provides socket operations and some related functions. +On Unix, it supports IP (Internet Protocol) and Unix domain sockets. +On other systems, it only supports IP. Functions specific for a +socket are available as methods of the socket object. + +Functions: + +socket() -- create a new socket object +socketpair() -- create a pair of new socket objects [*] +fromfd() -- create a socket object from an open file descriptor [*] +fromshare() -- create a socket object from data received from socket.share() [*] +gethostname() -- return the current hostname +gethostbyname() -- map a hostname to its IP number +gethostbyaddr() -- map an IP number or hostname to DNS info +getservbyname() -- map a service name and a protocol name to a port number +getprotobyname() -- map a protocol name (e.g. 
'tcp') to a number +ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order +htons(), htonl() -- convert 16, 32 bit int from host to network byte order +inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format +inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89) +socket.getdefaulttimeout() -- get the default timeout value +socket.setdefaulttimeout() -- set the default timeout value +create_connection() -- connects to an address, with an optional timeout and + optional source address. + + [*] not available on all platforms! + +Special objects: + +SocketType -- type object for socket objects +error -- exception raised for I/O errors +has_ipv6 -- boolean value indicating if IPv6 is supported + +Integer constants: + +AF_INET, AF_UNIX -- socket domains (first argument to socket() call) +SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument) + +Many other constants may be defined; these may be used in calls to +the setsockopt() and getsockopt() methods. +""" + +from __future__ import unicode_literals +from __future__ import print_function +from __future__ import division +from __future__ import absolute_import +from future.builtins import super + +import _socket +from _socket import * + +import os, sys, io + +try: + import errno +except ImportError: + errno = None +EBADF = getattr(errno, 'EBADF', 9) +EAGAIN = getattr(errno, 'EAGAIN', 11) +EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11) + +__all__ = ["getfqdn", "create_connection"] +__all__.extend(os._get_exports_list(_socket)) + + +_realsocket = socket + +# WSA error codes +if sys.platform.lower().startswith("win"): + errorTab = {} + errorTab[10004] = "The operation was interrupted." + errorTab[10009] = "A bad file handle was passed." + errorTab[10013] = "Permission denied." + errorTab[10014] = "A fault occurred on the network??" # WSAEFAULT + errorTab[10022] = "An invalid operation was attempted." 
+ errorTab[10035] = "The socket operation would block" + errorTab[10036] = "A blocking operation is already in progress." + errorTab[10048] = "The network address is in use." + errorTab[10054] = "The connection has been reset." + errorTab[10058] = "The network has been shut down." + errorTab[10060] = "The operation timed out." + errorTab[10061] = "Connection refused." + errorTab[10063] = "The name is too long." + errorTab[10064] = "The host is down." + errorTab[10065] = "The host is unreachable." + __all__.append("errorTab") + + +class socket(_socket.socket): + + """A subclass of _socket.socket adding the makefile() method.""" + + __slots__ = ["__weakref__", "_io_refs", "_closed"] + + def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None): + if fileno is None: + _socket.socket.__init__(self, family, type, proto) + else: + _socket.socket.__init__(self, family, type, proto, fileno) + self._io_refs = 0 + self._closed = False + + def __enter__(self): + return self + + def __exit__(self, *args): + if not self._closed: + self.close() + + def __repr__(self): + """Wrap __repr__() to reveal the real class name.""" + s = _socket.socket.__repr__(self) + if s.startswith(" socket object + + Return a new socket object connected to the same system resource. + """ + fd = dup(self.fileno()) + sock = self.__class__(self.family, self.type, self.proto, fileno=fd) + sock.settimeout(self.gettimeout()) + return sock + + def accept(self): + """accept() -> (socket object, address info) + + Wait for an incoming connection. Return a new socket + representing the connection, and the address of the client. + For IP sockets, the address info is a pair (hostaddr, port). + """ + fd, addr = self._accept() + sock = socket(self.family, self.type, self.proto, fileno=fd) + # Issue #7995: if no default timeout is set and the listening + # socket had a (non-zero) timeout, force the new socket in blocking + # mode to override platform-specific socket flags inheritance. 
+ if getdefaulttimeout() is None and self.gettimeout(): + sock.setblocking(True) + return sock, addr + + def makefile(self, mode="r", buffering=None, **_3to2kwargs): + """makefile(...) -> an I/O stream connected to the socket + + The arguments are as for io.open() after the filename, + except the only mode characters supported are 'r', 'w' and 'b'. + The semantics are similar too. (XXX refactor to share code?) + """ + if 'newline' in _3to2kwargs: newline = _3to2kwargs['newline']; del _3to2kwargs['newline'] + else: newline = None + if 'errors' in _3to2kwargs: errors = _3to2kwargs['errors']; del _3to2kwargs['errors'] + else: errors = None + if 'encoding' in _3to2kwargs: encoding = _3to2kwargs['encoding']; del _3to2kwargs['encoding'] + else: encoding = None + for c in mode: + if c not in ("r", "w", "b"): + raise ValueError("invalid mode %r (only r, w, b allowed)") + writing = "w" in mode + reading = "r" in mode or not writing + assert reading or writing + binary = "b" in mode + rawmode = "" + if reading: + rawmode += "r" + if writing: + rawmode += "w" + raw = SocketIO(self, rawmode) + self._io_refs += 1 + if buffering is None: + buffering = -1 + if buffering < 0: + buffering = io.DEFAULT_BUFFER_SIZE + if buffering == 0: + if not binary: + raise ValueError("unbuffered streams must be binary") + return raw + if reading and writing: + buffer = io.BufferedRWPair(raw, raw, buffering) + elif reading: + buffer = io.BufferedReader(raw, buffering) + else: + assert writing + buffer = io.BufferedWriter(raw, buffering) + if binary: + return buffer + text = io.TextIOWrapper(buffer, encoding, errors, newline) + text.mode = mode + return text + + def _decref_socketios(self): + if self._io_refs > 0: + self._io_refs -= 1 + if self._closed: + self.close() + + def _real_close(self, _ss=_socket.socket): + # This function should not reference any globals. See issue #808164. + _ss.close(self) + + def close(self): + # This function should not reference any globals. See issue #808164. 
+ self._closed = True + if self._io_refs <= 0: + self._real_close() + + def detach(self): + """detach() -> file descriptor + + Close the socket object without closing the underlying file descriptor. + The object cannot be used after this call, but the file descriptor + can be reused for other purposes. The file descriptor is returned. + """ + self._closed = True + return super().detach() + +def fromfd(fd, family, type, proto=0): + """ fromfd(fd, family, type[, proto]) -> socket object + + Create a socket object from a duplicate of the given file + descriptor. The remaining arguments are the same as for socket(). + """ + nfd = dup(fd) + return socket(family, type, proto, nfd) + +if hasattr(_socket.socket, "share"): + def fromshare(info): + """ fromshare(info) -> socket object + + Create a socket object from a the bytes object returned by + socket.share(pid). + """ + return socket(0, 0, 0, info) + +if hasattr(_socket, "socketpair"): + + def socketpair(family=None, type=SOCK_STREAM, proto=0): + """socketpair([family[, type[, proto]]]) -> (socket object, socket object) + + Create a pair of socket objects from the sockets returned by the platform + socketpair() function. + The arguments are the same as for socket() except the default family is + AF_UNIX if defined on the platform; otherwise, the default is AF_INET. + """ + if family is None: + try: + family = AF_UNIX + except NameError: + family = AF_INET + a, b = _socket.socketpair(family, type, proto) + a = socket(family, type, proto, a.detach()) + b = socket(family, type, proto, b.detach()) + return a, b + + +_blocking_errnos = set([EAGAIN, EWOULDBLOCK]) + +class SocketIO(io.RawIOBase): + + """Raw I/O implementation for stream sockets. + + This class supports the makefile() method on sockets. It provides + the raw I/O interface on top of a socket object. + """ + + # One might wonder why not let FileIO do the job instead. 
There are two + # main reasons why FileIO is not adapted: + # - it wouldn't work under Windows (where you can't used read() and + # write() on a socket handle) + # - it wouldn't work with socket timeouts (FileIO would ignore the + # timeout and consider the socket non-blocking) + + # XXX More docs + + def __init__(self, sock, mode): + if mode not in ("r", "w", "rw", "rb", "wb", "rwb"): + raise ValueError("invalid mode: %r" % mode) + io.RawIOBase.__init__(self) + self._sock = sock + if "b" not in mode: + mode += "b" + self._mode = mode + self._reading = "r" in mode + self._writing = "w" in mode + self._timeout_occurred = False + + def readinto(self, b): + """Read up to len(b) bytes into the writable buffer *b* and return + the number of bytes read. If the socket is non-blocking and no bytes + are available, None is returned. + + If *b* is non-empty, a 0 return value indicates that the connection + was shutdown at the other end. + """ + self._checkClosed() + self._checkReadable() + if self._timeout_occurred: + raise IOError("cannot read from timed out object") + while True: + try: + return self._sock.recv_into(b) + except timeout: + self._timeout_occurred = True + raise + # except InterruptedError: + # continue + except error as e: + if e.args[0] in _blocking_errnos: + return None + raise + + def write(self, b): + """Write the given bytes or bytearray object *b* to the socket + and return the number of bytes written. This can be less than + len(b) if not all data could be written. If the socket is + non-blocking and no bytes could be written None is returned. + """ + self._checkClosed() + self._checkWritable() + try: + return self._sock.send(b) + except error as e: + # XXX what about EINTR? + if e.args[0] in _blocking_errnos: + return None + raise + + def readable(self): + """True if the SocketIO is open for reading. 
+ """ + if self.closed: + raise ValueError("I/O operation on closed socket.") + return self._reading + + def writable(self): + """True if the SocketIO is open for writing. + """ + if self.closed: + raise ValueError("I/O operation on closed socket.") + return self._writing + + def seekable(self): + """True if the SocketIO is open for seeking. + """ + if self.closed: + raise ValueError("I/O operation on closed socket.") + return super().seekable() + + def fileno(self): + """Return the file descriptor of the underlying socket. + """ + self._checkClosed() + return self._sock.fileno() + + @property + def name(self): + if not self.closed: + return self.fileno() + else: + return -1 + + @property + def mode(self): + return self._mode + + def close(self): + """Close the SocketIO object. This doesn't close the underlying + socket, except if all references to it have disappeared. + """ + if self.closed: + return + io.RawIOBase.close(self) + self._sock._decref_socketios() + self._sock = None + + +def getfqdn(name=''): + """Get fully qualified domain name from name. + + An empty argument is interpreted as meaning the local host. + + First the hostname returned by gethostbyaddr() is checked, then + possibly existing aliases. In case no FQDN is available, hostname + from gethostname() is returned. + """ + name = name.strip() + if not name or name == '0.0.0.0': + name = gethostname() + try: + hostname, aliases, ipaddrs = gethostbyaddr(name) + except error: + pass + else: + aliases.insert(0, hostname) + for name in aliases: + if '.' in name: + break + else: + name = hostname + return name + + +# Re-use the same sentinel as in the Python stdlib socket module: +from socket import _GLOBAL_DEFAULT_TIMEOUT +# Was: _GLOBAL_DEFAULT_TIMEOUT = object() + + +def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, + source_address=None): + """Connect to *address* and return the socket object. + + Convenience function. 
Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + An host of '' or port 0 tells the OS to use the default. + """ + + host, port = address + err = None + for res in getaddrinfo(host, port, 0, SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket(af, socktype, proto) + if timeout is not _GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + + except error as _: + err = _ + if sock is not None: + sock.close() + + if err is not None: + raise err + else: + raise error("getaddrinfo returns an empty list") diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/socketserver.py b/.install/.kodi/addons/script.module.future/libs/future/backports/socketserver.py new file mode 100644 index 000000000..d1e24a6dd --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/socketserver.py @@ -0,0 +1,747 @@ +"""Generic socket server classes. + +This module tries to capture the various aspects of defining a server: + +For socket-based servers: + +- address family: + - AF_INET{,6}: IP (Internet Protocol) sockets (default) + - AF_UNIX: Unix domain sockets + - others, e.g. AF_DECNET are conceivable (see +- socket type: + - SOCK_STREAM (reliable stream, e.g. TCP) + - SOCK_DGRAM (datagrams, e.g. 
UDP) + +For request-based servers (including socket-based): + +- client address verification before further looking at the request + (This is actually a hook for any processing that needs to look + at the request before anything else, e.g. logging) +- how to handle multiple requests: + - synchronous (one request is handled at a time) + - forking (each request is handled by a new process) + - threading (each request is handled by a new thread) + +The classes in this module favor the server type that is simplest to +write: a synchronous TCP/IP server. This is bad class design, but +save some typing. (There's also the issue that a deep class hierarchy +slows down method lookups.) + +There are five classes in an inheritance diagram, four of which represent +synchronous servers of four types: + + +------------+ + | BaseServer | + +------------+ + | + v + +-----------+ +------------------+ + | TCPServer |------->| UnixStreamServer | + +-----------+ +------------------+ + | + v + +-----------+ +--------------------+ + | UDPServer |------->| UnixDatagramServer | + +-----------+ +--------------------+ + +Note that UnixDatagramServer derives from UDPServer, not from +UnixStreamServer -- the only difference between an IP and a Unix +stream server is the address family, which is simply repeated in both +unix server classes. + +Forking and threading versions of each type of server can be created +using the ForkingMixIn and ThreadingMixIn mix-in classes. For +instance, a threading UDP server class is created as follows: + + class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass + +The Mix-in class must come first, since it overrides a method defined +in UDPServer! Setting the various member variables also changes +the behavior of the underlying server mechanism. + +To implement a service, you must derive a class from +BaseRequestHandler and redefine its handle() method. 
You can then run +various versions of the service by combining one of the server classes +with your request handler class. + +The request handler class must be different for datagram or stream +services. This can be hidden by using the request handler +subclasses StreamRequestHandler or DatagramRequestHandler. + +Of course, you still have to use your head! + +For instance, it makes no sense to use a forking server if the service +contains state in memory that can be modified by requests (since the +modifications in the child process would never reach the initial state +kept in the parent process and passed to each child). In this case, +you can use a threading server, but you will probably have to use +locks to avoid two requests that come in nearly simultaneous to apply +conflicting changes to the server state. + +On the other hand, if you are building e.g. an HTTP server, where all +data is stored externally (e.g. in the file system), a synchronous +class will essentially render the service "deaf" while one request is +being handled -- which may be for a very long time if a client is slow +to read all the data it has requested. Here a threading or forking +server is appropriate. + +In some cases, it may be appropriate to process part of a request +synchronously, but to finish processing in a forked child depending on +the request data. This can be implemented by using a synchronous +server and doing an explicit fork in the request handler class +handle() method. + +Another approach to handling multiple simultaneous requests in an +environment that supports neither threads nor fork (or where these are +too expensive or inappropriate for the service) is to maintain an +explicit table of partially finished requests and to use select() to +decide which request to work on next (or whether to handle a new +incoming request). 
This is particularly important for stream services +where each client can potentially be connected for a long time (if +threads or subprocesses cannot be used). + +Future work: +- Standard classes for Sun RPC (which uses either UDP or TCP) +- Standard mix-in classes to implement various authentication + and encryption schemes +- Standard framework for select-based multiplexing + +XXX Open problems: +- What to do with out-of-band data? + +BaseServer: +- split generic "request" functionality out into BaseServer class. + Copyright (C) 2000 Luke Kenneth Casson Leighton + + example: read entries from a SQL database (requires overriding + get_request() to return a table entry from the database). + entry is processed by a RequestHandlerClass. + +""" + +# Author of the BaseServer patch: Luke Kenneth Casson Leighton + +# XXX Warning! +# There is a test suite for this module, but it cannot be run by the +# standard regression test. +# To run it manually, run Lib/test/test_socketserver.py. + +from __future__ import (absolute_import, print_function) + +__version__ = "0.4" + + +import socket +import select +import sys +import os +import errno +try: + import threading +except ImportError: + import dummy_threading as threading + +__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer", + "ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler", + "StreamRequestHandler","DatagramRequestHandler", + "ThreadingMixIn", "ForkingMixIn"] +if hasattr(socket, "AF_UNIX"): + __all__.extend(["UnixStreamServer","UnixDatagramServer", + "ThreadingUnixStreamServer", + "ThreadingUnixDatagramServer"]) + +def _eintr_retry(func, *args): + """restart a system call interrupted by EINTR""" + while True: + try: + return func(*args) + except OSError as e: + if e.errno != errno.EINTR: + raise + +class BaseServer(object): + + """Base class for server classes. 
+ + Methods for the caller: + + - __init__(server_address, RequestHandlerClass) + - serve_forever(poll_interval=0.5) + - shutdown() + - handle_request() # if you do not use serve_forever() + - fileno() -> int # for select() + + Methods that may be overridden: + + - server_bind() + - server_activate() + - get_request() -> request, client_address + - handle_timeout() + - verify_request(request, client_address) + - server_close() + - process_request(request, client_address) + - shutdown_request(request) + - close_request(request) + - service_actions() + - handle_error() + + Methods for derived classes: + + - finish_request(request, client_address) + + Class variables that may be overridden by derived classes or + instances: + + - timeout + - address_family + - socket_type + - allow_reuse_address + + Instance variables: + + - RequestHandlerClass + - socket + + """ + + timeout = None + + def __init__(self, server_address, RequestHandlerClass): + """Constructor. May be extended, do not override.""" + self.server_address = server_address + self.RequestHandlerClass = RequestHandlerClass + self.__is_shut_down = threading.Event() + self.__shutdown_request = False + + def server_activate(self): + """Called by constructor to activate the server. + + May be overridden. + + """ + pass + + def serve_forever(self, poll_interval=0.5): + """Handle one request at a time until shutdown. + + Polls for shutdown every poll_interval seconds. Ignores + self.timeout. If you need to do periodic tasks, do them in + another thread. + """ + self.__is_shut_down.clear() + try: + while not self.__shutdown_request: + # XXX: Consider using another file descriptor or + # connecting to the socket to wake this up instead of + # polling. Polling reduces our responsiveness to a + # shutdown request and wastes cpu at all other times. 
+ r, w, e = _eintr_retry(select.select, [self], [], [], + poll_interval) + if self in r: + self._handle_request_noblock() + + self.service_actions() + finally: + self.__shutdown_request = False + self.__is_shut_down.set() + + def shutdown(self): + """Stops the serve_forever loop. + + Blocks until the loop has finished. This must be called while + serve_forever() is running in another thread, or it will + deadlock. + """ + self.__shutdown_request = True + self.__is_shut_down.wait() + + def service_actions(self): + """Called by the serve_forever() loop. + + May be overridden by a subclass / Mixin to implement any code that + needs to be run during the loop. + """ + pass + + # The distinction between handling, getting, processing and + # finishing a request is fairly arbitrary. Remember: + # + # - handle_request() is the top-level call. It calls + # select, get_request(), verify_request() and process_request() + # - get_request() is different for stream or datagram sockets + # - process_request() is the place that may fork a new process + # or create a new thread to finish the request + # - finish_request() instantiates the request handler class; + # this constructor will handle the request all by itself + + def handle_request(self): + """Handle one request, possibly blocking. + + Respects self.timeout. + """ + # Support people who used socket.settimeout() to escape + # handle_request before self.timeout was available. + timeout = self.socket.gettimeout() + if timeout is None: + timeout = self.timeout + elif self.timeout is not None: + timeout = min(timeout, self.timeout) + fd_sets = _eintr_retry(select.select, [self], [], [], timeout) + if not fd_sets[0]: + self.handle_timeout() + return + self._handle_request_noblock() + + def _handle_request_noblock(self): + """Handle one request, without blocking. + + I assume that select.select has returned that the socket is + readable before this function was called, so there should be + no risk of blocking in get_request(). 
+ """ + try: + request, client_address = self.get_request() + except socket.error: + return + if self.verify_request(request, client_address): + try: + self.process_request(request, client_address) + except: + self.handle_error(request, client_address) + self.shutdown_request(request) + + def handle_timeout(self): + """Called if no new request arrives within self.timeout. + + Overridden by ForkingMixIn. + """ + pass + + def verify_request(self, request, client_address): + """Verify the request. May be overridden. + + Return True if we should proceed with this request. + + """ + return True + + def process_request(self, request, client_address): + """Call finish_request. + + Overridden by ForkingMixIn and ThreadingMixIn. + + """ + self.finish_request(request, client_address) + self.shutdown_request(request) + + def server_close(self): + """Called to clean-up the server. + + May be overridden. + + """ + pass + + def finish_request(self, request, client_address): + """Finish one request by instantiating RequestHandlerClass.""" + self.RequestHandlerClass(request, client_address, self) + + def shutdown_request(self, request): + """Called to shutdown and close an individual request.""" + self.close_request(request) + + def close_request(self, request): + """Called to clean up an individual request.""" + pass + + def handle_error(self, request, client_address): + """Handle an error gracefully. May be overridden. + + The default is to print a traceback and continue. + + """ + print('-'*40) + print('Exception happened during processing of request from', end=' ') + print(client_address) + import traceback + traceback.print_exc() # XXX But this goes to stderr! + print('-'*40) + + +class TCPServer(BaseServer): + + """Base class for various socket-based server classes. + + Defaults to synchronous IP stream (i.e., TCP). 
+ + Methods for the caller: + + - __init__(server_address, RequestHandlerClass, bind_and_activate=True) + - serve_forever(poll_interval=0.5) + - shutdown() + - handle_request() # if you don't use serve_forever() + - fileno() -> int # for select() + + Methods that may be overridden: + + - server_bind() + - server_activate() + - get_request() -> request, client_address + - handle_timeout() + - verify_request(request, client_address) + - process_request(request, client_address) + - shutdown_request(request) + - close_request(request) + - handle_error() + + Methods for derived classes: + + - finish_request(request, client_address) + + Class variables that may be overridden by derived classes or + instances: + + - timeout + - address_family + - socket_type + - request_queue_size (only for stream sockets) + - allow_reuse_address + + Instance variables: + + - server_address + - RequestHandlerClass + - socket + + """ + + address_family = socket.AF_INET + + socket_type = socket.SOCK_STREAM + + request_queue_size = 5 + + allow_reuse_address = False + + def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True): + """Constructor. May be extended, do not override.""" + BaseServer.__init__(self, server_address, RequestHandlerClass) + self.socket = socket.socket(self.address_family, + self.socket_type) + if bind_and_activate: + self.server_bind() + self.server_activate() + + def server_bind(self): + """Called by constructor to bind the socket. + + May be overridden. + + """ + if self.allow_reuse_address: + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.socket.bind(self.server_address) + self.server_address = self.socket.getsockname() + + def server_activate(self): + """Called by constructor to activate the server. + + May be overridden. + + """ + self.socket.listen(self.request_queue_size) + + def server_close(self): + """Called to clean-up the server. + + May be overridden. 
+ + """ + self.socket.close() + + def fileno(self): + """Return socket file number. + + Interface required by select(). + + """ + return self.socket.fileno() + + def get_request(self): + """Get the request and client address from the socket. + + May be overridden. + + """ + return self.socket.accept() + + def shutdown_request(self, request): + """Called to shutdown and close an individual request.""" + try: + #explicitly shutdown. socket.close() merely releases + #the socket and waits for GC to perform the actual close. + request.shutdown(socket.SHUT_WR) + except socket.error: + pass #some platforms may raise ENOTCONN here + self.close_request(request) + + def close_request(self, request): + """Called to clean up an individual request.""" + request.close() + + +class UDPServer(TCPServer): + + """UDP server class.""" + + allow_reuse_address = False + + socket_type = socket.SOCK_DGRAM + + max_packet_size = 8192 + + def get_request(self): + data, client_addr = self.socket.recvfrom(self.max_packet_size) + return (data, self.socket), client_addr + + def server_activate(self): + # No need to call listen() for UDP. + pass + + def shutdown_request(self, request): + # No need to shutdown anything. + self.close_request(request) + + def close_request(self, request): + # No need to close anything. + pass + +class ForkingMixIn(object): + + """Mix-in class to handle each request in a new process.""" + + timeout = 300 + active_children = None + max_children = 40 + + def collect_children(self): + """Internal routine to wait for children that have exited.""" + if self.active_children is None: return + while len(self.active_children) >= self.max_children: + # XXX: This will wait for any child process, not just ones + # spawned by this library. This could confuse other + # libraries that expect to be able to wait for their own + # children. 
+ try: + pid, status = os.waitpid(0, 0) + except os.error: + pid = None + if pid not in self.active_children: continue + self.active_children.remove(pid) + + # XXX: This loop runs more system calls than it ought + # to. There should be a way to put the active_children into a + # process group and then use os.waitpid(-pgid) to wait for any + # of that set, but I couldn't find a way to allocate pgids + # that couldn't collide. + for child in self.active_children: + try: + pid, status = os.waitpid(child, os.WNOHANG) + except os.error: + pid = None + if not pid: continue + try: + self.active_children.remove(pid) + except ValueError as e: + raise ValueError('%s. x=%d and list=%r' % (e.message, pid, + self.active_children)) + + def handle_timeout(self): + """Wait for zombies after self.timeout seconds of inactivity. + + May be extended, do not override. + """ + self.collect_children() + + def service_actions(self): + """Collect the zombie child processes regularly in the ForkingMixIn. + + service_actions is called in the BaseServer's serve_forver loop. + """ + self.collect_children() + + def process_request(self, request, client_address): + """Fork a new subprocess to process the request.""" + pid = os.fork() + if pid: + # Parent process + if self.active_children is None: + self.active_children = [] + self.active_children.append(pid) + self.close_request(request) + return + else: + # Child process. + # This must never return, hence os._exit()! + try: + self.finish_request(request, client_address) + self.shutdown_request(request) + os._exit(0) + except: + try: + self.handle_error(request, client_address) + self.shutdown_request(request) + finally: + os._exit(1) + + +class ThreadingMixIn(object): + """Mix-in class to handle each request in a new thread.""" + + # Decides how threads will act upon termination of the + # main process + daemon_threads = False + + def process_request_thread(self, request, client_address): + """Same as in BaseServer but as a thread. 
+ + In addition, exception handling is done here. + + """ + try: + self.finish_request(request, client_address) + self.shutdown_request(request) + except: + self.handle_error(request, client_address) + self.shutdown_request(request) + + def process_request(self, request, client_address): + """Start a new thread to process the request.""" + t = threading.Thread(target = self.process_request_thread, + args = (request, client_address)) + t.daemon = self.daemon_threads + t.start() + + +class ForkingUDPServer(ForkingMixIn, UDPServer): pass +class ForkingTCPServer(ForkingMixIn, TCPServer): pass + +class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass +class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass + +if hasattr(socket, 'AF_UNIX'): + + class UnixStreamServer(TCPServer): + address_family = socket.AF_UNIX + + class UnixDatagramServer(UDPServer): + address_family = socket.AF_UNIX + + class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass + + class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass + +class BaseRequestHandler(object): + + """Base class for request handler classes. + + This class is instantiated for each request to be handled. The + constructor sets the instance variables request, client_address + and server, and then calls the handle() method. To implement a + specific service, all you need to do is to derive a class which + defines a handle() method. + + The handle() method can find the request as self.request, the + client address as self.client_address, and the server (in case it + needs access to per-server information) as self.server. Since a + separate instance is created for each request, the handle() method + can define arbitrary other instance variariables. 
+ + """ + + def __init__(self, request, client_address, server): + self.request = request + self.client_address = client_address + self.server = server + self.setup() + try: + self.handle() + finally: + self.finish() + + def setup(self): + pass + + def handle(self): + pass + + def finish(self): + pass + + +# The following two classes make it possible to use the same service +# class for stream or datagram servers. +# Each class sets up these instance variables: +# - rfile: a file object from which receives the request is read +# - wfile: a file object to which the reply is written +# When the handle() method returns, wfile is flushed properly + + +class StreamRequestHandler(BaseRequestHandler): + + """Define self.rfile and self.wfile for stream sockets.""" + + # Default buffer sizes for rfile, wfile. + # We default rfile to buffered because otherwise it could be + # really slow for large data (a getc() call per byte); we make + # wfile unbuffered because (a) often after a write() we want to + # read and we need to flush the line; (b) big writes to unbuffered + # files are typically optimized by stdio even when big reads + # aren't. + rbufsize = -1 + wbufsize = 0 + + # A timeout to apply to the request socket, if not None. + timeout = None + + # Disable nagle algorithm for this socket, if True. + # Use only when wbufsize != 0, to avoid small packets. + disable_nagle_algorithm = False + + def setup(self): + self.connection = self.request + if self.timeout is not None: + self.connection.settimeout(self.timeout) + if self.disable_nagle_algorithm: + self.connection.setsockopt(socket.IPPROTO_TCP, + socket.TCP_NODELAY, True) + self.rfile = self.connection.makefile('rb', self.rbufsize) + self.wfile = self.connection.makefile('wb', self.wbufsize) + + def finish(self): + if not self.wfile.closed: + try: + self.wfile.flush() + except socket.error: + # An final socket error may have occurred here, such as + # the local error ECONNABORTED. 
+ pass + self.wfile.close() + self.rfile.close() + + +class DatagramRequestHandler(BaseRequestHandler): + + # XXX Regrettably, I cannot get this working on Linux; + # s.recvfrom() doesn't return a meaningful client address. + + """Define self.rfile and self.wfile for datagram sockets.""" + + def setup(self): + from io import BytesIO + self.packet, self.socket = self.request + self.rfile = BytesIO(self.packet) + self.wfile = BytesIO() + + def finish(self): + self.socket.sendto(self.wfile.getvalue(), self.client_address) diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/backports/test/__init__.py new file mode 100644 index 000000000..0bba5e69a --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/__init__.py @@ -0,0 +1,9 @@ +""" +test package backported for python-future. + +Its primary purpose is to allow use of "import test.support" for running +the Python standard library unit tests using the new Python 3 stdlib +import location. + +Python 3 renamed test.test_support to test.support. 
+""" diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/badcert.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/badcert.pem new file mode 100644 index 000000000..c4191460f --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/badcert.pem @@ -0,0 +1,36 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXwIBAAKBgQC8ddrhm+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9L +opdJhTvbGfEj0DQs1IE8M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVH +fhi/VwovESJlaBOp+WMnfhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQAB +AoGBAK0FZpaKj6WnJZN0RqhhK+ggtBWwBnc0U/ozgKz2j1s3fsShYeiGtW6CK5nU +D1dZ5wzhbGThI7LiOXDvRucc9n7vUgi0alqPQ/PFodPxAN/eEYkmXQ7W2k7zwsDA +IUK0KUhktQbLu8qF/m8qM86ba9y9/9YkXuQbZ3COl5ahTZrhAkEA301P08RKv3KM +oXnGU2UHTuJ1MAD2hOrPxjD4/wxA/39EWG9bZczbJyggB4RHu0I3NOSFjAm3HQm0 +ANOu5QK9owJBANgOeLfNNcF4pp+UikRFqxk5hULqRAWzVxVrWe85FlPm0VVmHbb/ +loif7mqjU8o1jTd/LM7RD9f2usZyE2psaw8CQQCNLhkpX3KO5kKJmS9N7JMZSc4j +oog58yeYO8BBqKKzpug0LXuQultYv2K4veaIO04iL9VLe5z9S/Q1jaCHBBuXAkEA +z8gjGoi1AOp6PBBLZNsncCvcV/0aC+1se4HxTNo2+duKSDnbq+ljqOM+E7odU+Nq +ewvIWOG//e8fssd0mq3HywJBAJ8l/c8GVmrpFTx8r/nZ2Pyyjt3dH1widooDXYSV +q6Gbf41Llo5sYAtmxdndTLASuHKecacTgZVhy0FryZpLKrU= +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +Just bad cert data +-----END CERTIFICATE----- +-----BEGIN RSA PRIVATE KEY----- +MIICXwIBAAKBgQC8ddrhm+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9L +opdJhTvbGfEj0DQs1IE8M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVH +fhi/VwovESJlaBOp+WMnfhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQAB +AoGBAK0FZpaKj6WnJZN0RqhhK+ggtBWwBnc0U/ozgKz2j1s3fsShYeiGtW6CK5nU +D1dZ5wzhbGThI7LiOXDvRucc9n7vUgi0alqPQ/PFodPxAN/eEYkmXQ7W2k7zwsDA +IUK0KUhktQbLu8qF/m8qM86ba9y9/9YkXuQbZ3COl5ahTZrhAkEA301P08RKv3KM +oXnGU2UHTuJ1MAD2hOrPxjD4/wxA/39EWG9bZczbJyggB4RHu0I3NOSFjAm3HQm0 +ANOu5QK9owJBANgOeLfNNcF4pp+UikRFqxk5hULqRAWzVxVrWe85FlPm0VVmHbb/ +loif7mqjU8o1jTd/LM7RD9f2usZyE2psaw8CQQCNLhkpX3KO5kKJmS9N7JMZSc4j 
+oog58yeYO8BBqKKzpug0LXuQultYv2K4veaIO04iL9VLe5z9S/Q1jaCHBBuXAkEA +z8gjGoi1AOp6PBBLZNsncCvcV/0aC+1se4HxTNo2+duKSDnbq+ljqOM+E7odU+Nq +ewvIWOG//e8fssd0mq3HywJBAJ8l/c8GVmrpFTx8r/nZ2Pyyjt3dH1widooDXYSV +q6Gbf41Llo5sYAtmxdndTLASuHKecacTgZVhy0FryZpLKrU= +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +Just bad cert data +-----END CERTIFICATE----- diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/badkey.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/badkey.pem new file mode 100644 index 000000000..1c8a95571 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/badkey.pem @@ -0,0 +1,40 @@ +-----BEGIN RSA PRIVATE KEY----- +Bad Key, though the cert should be OK +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICpzCCAhCgAwIBAgIJAP+qStv1cIGNMA0GCSqGSIb3DQEBBQUAMIGJMQswCQYD +VQQGEwJVUzERMA8GA1UECBMIRGVsYXdhcmUxEzARBgNVBAcTCldpbG1pbmd0b24x +IzAhBgNVBAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMQwwCgYDVQQLEwNT +U0wxHzAdBgNVBAMTFnNvbWVtYWNoaW5lLnB5dGhvbi5vcmcwHhcNMDcwODI3MTY1 +NDUwWhcNMTMwMjE2MTY1NDUwWjCBiTELMAkGA1UEBhMCVVMxETAPBgNVBAgTCERl +bGF3YXJlMRMwEQYDVQQHEwpXaWxtaW5ndG9uMSMwIQYDVQQKExpQeXRob24gU29m +dHdhcmUgRm91bmRhdGlvbjEMMAoGA1UECxMDU1NMMR8wHQYDVQQDExZzb21lbWFj +aGluZS5weXRob24ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8ddrh +m+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9LopdJhTvbGfEj0DQs1IE8 +M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVHfhi/VwovESJlaBOp+WMn +fhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQABoxUwEzARBglghkgBhvhC +AQEEBAMCBkAwDQYJKoZIhvcNAQEFBQADgYEAF4Q5BVqmCOLv1n8je/Jw9K669VXb +08hyGzQhkemEBYQd6fzQ9A/1ZzHkJKb1P6yreOLSEh4KcxYPyrLRC1ll8nr5OlCx +CMhKkTnR6qBsdNV0XtdU2+N25hqW+Ma4ZeqsN/iiJVCGNOZGnvQuvCAGWF8+J/f/ +iHkC6gGdBJhogs4= +-----END CERTIFICATE----- +-----BEGIN RSA PRIVATE KEY----- +Bad Key, though the cert should be OK +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICpzCCAhCgAwIBAgIJAP+qStv1cIGNMA0GCSqGSIb3DQEBBQUAMIGJMQswCQYD 
+VQQGEwJVUzERMA8GA1UECBMIRGVsYXdhcmUxEzARBgNVBAcTCldpbG1pbmd0b24x +IzAhBgNVBAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMQwwCgYDVQQLEwNT +U0wxHzAdBgNVBAMTFnNvbWVtYWNoaW5lLnB5dGhvbi5vcmcwHhcNMDcwODI3MTY1 +NDUwWhcNMTMwMjE2MTY1NDUwWjCBiTELMAkGA1UEBhMCVVMxETAPBgNVBAgTCERl +bGF3YXJlMRMwEQYDVQQHEwpXaWxtaW5ndG9uMSMwIQYDVQQKExpQeXRob24gU29m +dHdhcmUgRm91bmRhdGlvbjEMMAoGA1UECxMDU1NMMR8wHQYDVQQDExZzb21lbWFj +aGluZS5weXRob24ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8ddrh +m+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9LopdJhTvbGfEj0DQs1IE8 +M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVHfhi/VwovESJlaBOp+WMn +fhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQABoxUwEzARBglghkgBhvhC +AQEEBAMCBkAwDQYJKoZIhvcNAQEFBQADgYEAF4Q5BVqmCOLv1n8je/Jw9K669VXb +08hyGzQhkemEBYQd6fzQ9A/1ZzHkJKb1P6yreOLSEh4KcxYPyrLRC1ll8nr5OlCx +CMhKkTnR6qBsdNV0XtdU2+N25hqW+Ma4ZeqsN/iiJVCGNOZGnvQuvCAGWF8+J/f/ +iHkC6gGdBJhogs4= +-----END CERTIFICATE----- diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/dh512.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/dh512.pem new file mode 100644 index 000000000..200d16cd8 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/dh512.pem @@ -0,0 +1,9 @@ +-----BEGIN DH PARAMETERS----- +MEYCQQD1Kv884bEpQBgRjXyEpwpy1obEAxnIByl6ypUM2Zafq9AKUJsCRtMIPWak +XUGfnHy9iUsiGSa6q6Jew1XpKgVfAgEC +-----END DH PARAMETERS----- + +These are the 512 bit DH parameters from "Assigned Number for SKIP Protocols" +(http://www.skip-vpn.org/spec/numbers.html). +See there for how they were generated. +Note that g is not a generator, but this is not a problem since p is a safe prime. 
diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/https_svn_python_org_root.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/https_svn_python_org_root.pem new file mode 100644 index 000000000..e7dfc8294 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/https_svn_python_org_root.pem @@ -0,0 +1,41 @@ +-----BEGIN CERTIFICATE----- +MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290 +IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB +IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA +Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO +BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi +MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ +ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ +8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6 +zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y +fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7 +w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc +G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k +epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q +laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ +QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU +fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826 +YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w +ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY +gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe +MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0 +IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy +dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw 
+czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0 +dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl +aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC +AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg +b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB +ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc +nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg +18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c +gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl +Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY +sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T +SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF +CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum +GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk +zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW +omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD +-----END CERTIFICATE----- diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/keycert.passwd.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/keycert.passwd.pem new file mode 100644 index 000000000..e90574881 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/keycert.passwd.pem @@ -0,0 +1,33 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,1A8D9D2A02EC698A + +kJYbfZ8L0sfe9Oty3gw0aloNnY5E8fegRfQLZlNoxTl6jNt0nIwI8kDJ36CZgR9c +u3FDJm/KqrfUoz8vW+qEnWhSG7QPX2wWGPHd4K94Yz/FgrRzZ0DoK7XxXq9gOtVA +AVGQhnz32p+6WhfGsCr9ArXEwRZrTk/FvzEPaU5fHcoSkrNVAGX8IpSVkSDwEDQr +Gv17+cfk99UV1OCza6yKHoFkTtrC+PZU71LomBabivS2Oc4B9hYuSR2hF01wTHP+ +YlWNagZOOVtNz4oKK9x9eNQpmfQXQvPPTfusexKIbKfZrMvJoxcm1gfcZ0H/wK6P +6wmXSG35qMOOztCZNtperjs1wzEBXznyK8QmLcAJBjkfarABJX9vBEzZV0OUKhy+ +noORFwHTllphbmydLhu6ehLUZMHPhzAS5UN7srtpSN81eerDMy0RMUAwA7/PofX1 
+94Me85Q8jP0PC9ETdsJcPqLzAPETEYu0ELewKRcrdyWi+tlLFrpE5KT/s5ecbl9l +7B61U4Kfd1PIXc/siINhU3A3bYK+845YyUArUOnKf1kEox7p1RpD7yFqVT04lRTo +cibNKATBusXSuBrp2G6GNuhWEOSafWCKJQAzgCYIp6ZTV2khhMUGppc/2H3CF6cO +zX0KtlPVZC7hLkB6HT8SxYUwF1zqWY7+/XPPdc37MeEZ87Q3UuZwqORLY+Z0hpgt +L5JXBCoklZhCAaN2GqwFLXtGiRSRFGY7xXIhbDTlE65Wv1WGGgDLMKGE1gOz3yAo +2jjG1+yAHJUdE69XTFHSqSkvaloA1W03LdMXZ9VuQJ/ySXCie6ABAQ== +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV +BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw +MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH +Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k +YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw +gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 +6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt +pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw +FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd +BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G +lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 +CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX +-----END CERTIFICATE----- diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/keycert.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/keycert.pem new file mode 100644 index 000000000..64318aa2e --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/keycert.pem @@ -0,0 +1,31 @@ +-----BEGIN PRIVATE KEY----- +MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBANtb0+YrKuxevGpm +LrjaUhZSgz6zFAmuGFmKmUbdjmfv9zSmmdsQIksK++jK0Be9LeZy20j6ahOfuVa0 +ufEmPoP7Fy4hXegKZR9cCWcIe/A6H2xWF1IIJLRTLaU8ol/I7T+um5HD5AwAwNPP +USNU0Eegmvp+xxWu3NX2m1Veot85AgMBAAECgYA3ZdZ673X0oexFlq7AAmrutkHt 
+CL7LvwrpOiaBjhyTxTeSNWzvtQBkIU8DOI0bIazA4UreAFffwtvEuPmonDb3F+Iq +SMAu42XcGyVZEl+gHlTPU9XRX7nTOXVt+MlRRRxL6t9GkGfUAXI3XxJDXW3c0vBK +UL9xqD8cORXOfE06rQJBAP8mEX1ERkR64Ptsoe4281vjTlNfIbs7NMPkUnrn9N/Y +BLhjNIfQ3HFZG8BTMLfX7kCS9D593DW5tV4Z9BP/c6cCQQDcFzCcVArNh2JSywOQ +ZfTfRbJg/Z5Lt9Fkngv1meeGNPgIMLN8Sg679pAOOWmzdMO3V706rNPzSVMME7E5 +oPIfAkEA8pDddarP5tCvTTgUpmTFbakm0KoTZm2+FzHcnA4jRh+XNTjTOv98Y6Ik +eO5d1ZnKXseWvkZncQgxfdnMqqpj5wJAcNq/RVne1DbYlwWchT2Si65MYmmJ8t+F +0mcsULqjOnEMwf5e+ptq5LzwbyrHZYq5FNk7ocufPv/ZQrcSSC+cFwJBAKvOJByS +x56qyGeZLOQlWS2JS3KJo59XuLFGqcbgN9Om9xFa41Yb4N9NvplFivsvZdw3m1Q/ +SPIXQuT8RMPDVNQ= +-----END PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV +BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw +MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH +Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k +YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw +gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 +6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt +pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw +FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd +BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G +lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 +CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX +-----END CERTIFICATE----- diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/keycert2.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/keycert2.pem new file mode 100644 index 000000000..e8a9e082b --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/keycert2.pem @@ -0,0 +1,31 @@ +-----BEGIN PRIVATE KEY----- +MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAJnsJZVrppL+W5I9 
+zGQrrawWwE5QJpBK9nWw17mXrZ03R1cD9BamLGivVISbPlRlAVnZBEyh1ATpsB7d +CUQ+WHEvALquvx4+Yw5l+fXeiYRjrLRBYZuVy8yNtXzU3iWcGObcYRkUdiXdOyP7 +sLF2YZHRvQZpzgDBKkrraeQ81w21AgMBAAECgYBEm7n07FMHWlE+0kT0sXNsLYfy +YE+QKZnJw9WkaDN+zFEEPELkhZVt5BjsMraJr6v2fIEqF0gGGJPkbenffVq2B5dC +lWUOxvJHufMK4sM3Cp6s/gOp3LP+QkzVnvJSfAyZU6l+4PGX5pLdUsXYjPxgzjzL +S36tF7/2Uv1WePyLUQJBAMsPhYzUXOPRgmbhcJiqi9A9c3GO8kvSDYTCKt3VMnqz +HBn6MQ4VQasCD1F+7jWTI0FU/3vdw8non/Fj8hhYqZcCQQDCDRdvmZqDiZnpMqDq +L6ZSrLTVtMvZXZbgwForaAD9uHj51TME7+eYT7EG2YCgJTXJ4YvRJEnPNyskwdKt +vTSTAkEAtaaN/vyemEJ82BIGStwONNw0ILsSr5cZ9tBHzqiA/tipY+e36HRFiXhP +QcU9zXlxyWkDH8iz9DSAmE2jbfoqwwJANlMJ65E543cjIlitGcKLMnvtCCLcKpb7 +xSG0XJB6Lo11OKPJ66jp0gcFTSCY1Lx2CXVd+gfJrfwI1Pp562+bhwJBAJ9IfDPU +R8OpO9v1SGd8x33Owm7uXOpB9d63/T70AD1QOXjKUC4eXYbt0WWfWuny/RNPRuyh +w7DXSfUF+kPKolU= +-----END PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICXTCCAcagAwIBAgIJAIO3upAG445fMA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV +BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xFTATBgNVBAMTDGZha2Vob3N0bmFtZTAeFw0x +MDEwMDkxNTAxMDBaFw0yMDEwMDYxNTAxMDBaMGIxCzAJBgNVBAYTAlhZMRcwFQYD +VQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZv +dW5kYXRpb24xFTATBgNVBAMTDGZha2Vob3N0bmFtZTCBnzANBgkqhkiG9w0BAQEF +AAOBjQAwgYkCgYEAmewllWumkv5bkj3MZCutrBbATlAmkEr2dbDXuZetnTdHVwP0 +FqYsaK9UhJs+VGUBWdkETKHUBOmwHt0JRD5YcS8Auq6/Hj5jDmX59d6JhGOstEFh +m5XLzI21fNTeJZwY5txhGRR2Jd07I/uwsXZhkdG9BmnOAMEqSutp5DzXDbUCAwEA +AaMbMBkwFwYDVR0RBBAwDoIMZmFrZWhvc3RuYW1lMA0GCSqGSIb3DQEBBQUAA4GB +AH+iMClLLGSaKWgwXsmdVo4FhTZZHo8Uprrtg3N9FxEeE50btpDVQysgRt5ias3K +m+bME9zbKwvbVWD5zZdjus4pDgzwF/iHyccL8JyYhxOvS/9zmvAtFXj/APIIbZFp +IT75d9f88ScIGEtknZQejnrdhB64tYki/EqluiuKBqKD +-----END CERTIFICATE----- diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/nokia.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/nokia.pem new file mode 100644 index 000000000..0d044df43 --- /dev/null +++ 
b/.install/.kodi/addons/script.module.future/libs/future/backports/test/nokia.pem @@ -0,0 +1,31 @@ +# Certificate for projects.developer.nokia.com:443 (see issue 13034) +-----BEGIN CERTIFICATE----- +MIIFLDCCBBSgAwIBAgIQLubqdkCgdc7lAF9NfHlUmjANBgkqhkiG9w0BAQUFADCB +vDELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTswOQYDVQQLEzJUZXJtcyBvZiB1c2Ug +YXQgaHR0cHM6Ly93d3cudmVyaXNpZ24uY29tL3JwYSAoYykxMDE2MDQGA1UEAxMt +VmVyaVNpZ24gQ2xhc3MgMyBJbnRlcm5hdGlvbmFsIFNlcnZlciBDQSAtIEczMB4X +DTExMDkyMTAwMDAwMFoXDTEyMDkyMDIzNTk1OVowcTELMAkGA1UEBhMCRkkxDjAM +BgNVBAgTBUVzcG9vMQ4wDAYDVQQHFAVFc3BvbzEOMAwGA1UEChQFTm9raWExCzAJ +BgNVBAsUAkJJMSUwIwYDVQQDFBxwcm9qZWN0cy5kZXZlbG9wZXIubm9raWEuY29t +MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCr92w1bpHYSYxUEx8N/8Iddda2 +lYi+aXNtQfV/l2Fw9Ykv3Ipw4nLeGTj18FFlAZgMdPRlgrzF/NNXGw/9l3/qKdow +CypkQf8lLaxb9Ze1E/KKmkRJa48QTOqvo6GqKuTI6HCeGlG1RxDb8YSKcQWLiytn +yj3Wp4MgRQO266xmMQIDAQABo4IB9jCCAfIwQQYDVR0RBDowOIIccHJvamVjdHMu +ZGV2ZWxvcGVyLm5va2lhLmNvbYIYcHJvamVjdHMuZm9ydW0ubm9raWEuY29tMAkG +A1UdEwQCMAAwCwYDVR0PBAQDAgWgMEEGA1UdHwQ6MDgwNqA0oDKGMGh0dHA6Ly9T +VlJJbnRsLUczLWNybC52ZXJpc2lnbi5jb20vU1ZSSW50bEczLmNybDBEBgNVHSAE +PTA7MDkGC2CGSAGG+EUBBxcDMCowKAYIKwYBBQUHAgEWHGh0dHBzOi8vd3d3LnZl +cmlzaWduLmNvbS9ycGEwKAYDVR0lBCEwHwYJYIZIAYb4QgQBBggrBgEFBQcDAQYI +KwYBBQUHAwIwcgYIKwYBBQUHAQEEZjBkMCQGCCsGAQUFBzABhhhodHRwOi8vb2Nz +cC52ZXJpc2lnbi5jb20wPAYIKwYBBQUHMAKGMGh0dHA6Ly9TVlJJbnRsLUczLWFp +YS52ZXJpc2lnbi5jb20vU1ZSSW50bEczLmNlcjBuBggrBgEFBQcBDARiMGChXqBc +MFowWDBWFglpbWFnZS9naWYwITAfMAcGBSsOAwIaBBRLa7kolgYMu9BSOJsprEsH +iyEFGDAmFiRodHRwOi8vbG9nby52ZXJpc2lnbi5jb20vdnNsb2dvMS5naWYwDQYJ +KoZIhvcNAQEFBQADggEBACQuPyIJqXwUyFRWw9x5yDXgMW4zYFopQYOw/ItRY522 +O5BsySTh56BWS6mQB07XVfxmYUGAvRQDA5QHpmY8jIlNwSmN3s8RKo+fAtiNRlcL +x/mWSfuMs3D/S6ev3D6+dpEMZtjrhOdctsarMKp8n/hPbwhAbg5hVjpkW5n8vz2y +0KxvvkA1AxpLwpVv7OlK17ttzIHw8bp9HTlHBU5s8bKz4a565V/a5HI0CSEv/+0y +ko4/ghTnZc1CkmUngKKeFMSah/mT/xAh8XnE2l1AazFa8UKuYki1e+ArHaGZc4ix 
+UYOtiRphwfuYQhRZ7qX9q2MMkCMI65XNK/SaFrAbbG0= +-----END CERTIFICATE----- diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/nullbytecert.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/nullbytecert.pem new file mode 100644 index 000000000..447186c95 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/nullbytecert.pem @@ -0,0 +1,90 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 0 (0x0) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org + Validity + Not Before: Aug 7 13:11:52 2013 GMT + Not After : Aug 7 13:12:52 2013 GMT + Subject: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:b5:ea:ed:c9:fb:46:7d:6f:3b:76:80:dd:3a:f3: + 03:94:0b:a7:a6:db:ec:1d:df:ff:23:74:08:9d:97: + 16:3f:a3:a4:7b:3e:1b:0e:96:59:25:03:a7:26:e2: + 88:a9:cf:79:cd:f7:04:56:b0:ab:79:32:6e:59:c1: + 32:30:54:eb:58:a8:cb:91:f0:42:a5:64:27:cb:d4: + 56:31:88:52:ad:cf:bd:7f:f0:06:64:1f:cc:27:b8: + a3:8b:8c:f3:d8:29:1f:25:0b:f5:46:06:1b:ca:02: + 45:ad:7b:76:0a:9c:bf:bb:b9:ae:0d:16:ab:60:75: + ae:06:3e:9c:7c:31:dc:92:2f:29:1a:e0:4b:0c:91: + 90:6c:e9:37:c5:90:d7:2a:d7:97:15:a3:80:8f:5d: + 7b:49:8f:54:30:d4:97:2c:1c:5b:37:b5:ab:69:30: + 68:43:d3:33:78:4b:02:60:f5:3c:44:80:a1:8f:e7: + f0:0f:d1:5e:87:9e:46:cf:62:fc:f9:bf:0c:65:12: + f1:93:c8:35:79:3f:c8:ec:ec:47:f5:ef:be:44:d5: + ae:82:1e:2d:9a:9f:98:5a:67:65:e1:74:70:7c:cb: + d3:c2:ce:0e:45:49:27:dc:e3:2d:d4:fb:48:0e:2f: + 9e:77:b8:14:46:c0:c4:36:ca:02:ae:6a:91:8c:da: + 2f:85 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: critical + CA:FALSE + X509v3 
Subject Key Identifier: + 88:5A:55:C0:52:FF:61:CD:52:A3:35:0F:EA:5A:9C:24:38:22:F7:5C + X509v3 Key Usage: + Digital Signature, Non Repudiation, Key Encipherment + X509v3 Subject Alternative Name: + ************************************************************* + WARNING: The values for DNS, email and URI are WRONG. OpenSSL + doesn't print the text after a NULL byte. + ************************************************************* + DNS:altnull.python.org, email:null@python.org, URI:http://null.python.org, IP Address:192.0.2.1, IP Address:2001:DB8:0:0:0:0:0:1 + Signature Algorithm: sha1WithRSAEncryption + ac:4f:45:ef:7d:49:a8:21:70:8e:88:59:3e:d4:36:42:70:f5: + a3:bd:8b:d7:a8:d0:58:f6:31:4a:b1:a4:a6:dd:6f:d9:e8:44: + 3c:b6:0a:71:d6:7f:b1:08:61:9d:60:ce:75:cf:77:0c:d2:37: + 86:02:8d:5e:5d:f9:0f:71:b4:16:a8:c1:3d:23:1c:f1:11:b3: + 56:6e:ca:d0:8d:34:94:e6:87:2a:99:f2:ae:ae:cc:c2:e8:86: + de:08:a8:7f:c5:05:fa:6f:81:a7:82:e6:d0:53:9d:34:f4:ac: + 3e:40:fe:89:57:7a:29:a4:91:7e:0b:c6:51:31:e5:10:2f:a4: + 60:76:cd:95:51:1a:be:8b:a1:b0:fd:ad:52:bd:d7:1b:87:60: + d2:31:c7:17:c4:18:4f:2d:08:25:a3:a7:4f:b7:92:ca:e2:f5: + 25:f1:54:75:81:9d:b3:3d:61:a2:f7:da:ed:e1:c6:6f:2c:60: + 1f:d8:6f:c5:92:05:ab:c9:09:62:49:a9:14:ad:55:11:cc:d6: + 4a:19:94:99:97:37:1d:81:5f:8b:cf:a3:a8:96:44:51:08:3d: + 0b:05:65:12:eb:b6:70:80:88:48:72:4f:c6:c2:da:cf:cd:8e: + 5b:ba:97:2f:60:b4:96:56:49:5e:3a:43:76:63:04:be:2a:f6: + c1:ca:a9:94 +-----BEGIN CERTIFICATE----- +MIIE2DCCA8CgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBxTELMAkGA1UEBhMCVVMx +DzANBgNVBAgMBk9yZWdvbjESMBAGA1UEBwwJQmVhdmVydG9uMSMwIQYDVQQKDBpQ +eXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEgMB4GA1UECwwXUHl0aG9uIENvcmUg +RGV2ZWxvcG1lbnQxJDAiBgNVBAMMG251bGwucHl0aG9uLm9yZwBleGFtcGxlLm9y +ZzEkMCIGCSqGSIb3DQEJARYVcHl0aG9uLWRldkBweXRob24ub3JnMB4XDTEzMDgw +NzEzMTE1MloXDTEzMDgwNzEzMTI1MlowgcUxCzAJBgNVBAYTAlVTMQ8wDQYDVQQI +DAZPcmVnb24xEjAQBgNVBAcMCUJlYXZlcnRvbjEjMCEGA1UECgwaUHl0aG9uIFNv +ZnR3YXJlIEZvdW5kYXRpb24xIDAeBgNVBAsMF1B5dGhvbiBDb3JlIERldmVsb3Bt 
+ZW50MSQwIgYDVQQDDBtudWxsLnB5dGhvbi5vcmcAZXhhbXBsZS5vcmcxJDAiBgkq +hkiG9w0BCQEWFXB5dGhvbi1kZXZAcHl0aG9uLm9yZzCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBALXq7cn7Rn1vO3aA3TrzA5QLp6bb7B3f/yN0CJ2XFj+j +pHs+Gw6WWSUDpybiiKnPec33BFawq3kyblnBMjBU61ioy5HwQqVkJ8vUVjGIUq3P +vX/wBmQfzCe4o4uM89gpHyUL9UYGG8oCRa17dgqcv7u5rg0Wq2B1rgY+nHwx3JIv +KRrgSwyRkGzpN8WQ1yrXlxWjgI9de0mPVDDUlywcWze1q2kwaEPTM3hLAmD1PESA +oY/n8A/RXoeeRs9i/Pm/DGUS8ZPINXk/yOzsR/XvvkTVroIeLZqfmFpnZeF0cHzL +08LODkVJJ9zjLdT7SA4vnne4FEbAxDbKAq5qkYzaL4UCAwEAAaOB0DCBzTAMBgNV +HRMBAf8EAjAAMB0GA1UdDgQWBBSIWlXAUv9hzVKjNQ/qWpwkOCL3XDALBgNVHQ8E +BAMCBeAwgZAGA1UdEQSBiDCBhYIeYWx0bnVsbC5weXRob24ub3JnAGV4YW1wbGUu +Y29tgSBudWxsQHB5dGhvbi5vcmcAdXNlckBleGFtcGxlLm9yZ4YpaHR0cDovL251 +bGwucHl0aG9uLm9yZwBodHRwOi8vZXhhbXBsZS5vcmeHBMAAAgGHECABDbgAAAAA +AAAAAAAAAAEwDQYJKoZIhvcNAQEFBQADggEBAKxPRe99SaghcI6IWT7UNkJw9aO9 +i9eo0Fj2MUqxpKbdb9noRDy2CnHWf7EIYZ1gznXPdwzSN4YCjV5d+Q9xtBaowT0j +HPERs1ZuytCNNJTmhyqZ8q6uzMLoht4IqH/FBfpvgaeC5tBTnTT0rD5A/olXeimk +kX4LxlEx5RAvpGB2zZVRGr6LobD9rVK91xuHYNIxxxfEGE8tCCWjp0+3ksri9SXx +VHWBnbM9YaL32u3hxm8sYB/Yb8WSBavJCWJJqRStVRHM1koZlJmXNx2BX4vPo6iW +RFEIPQsFZRLrtnCAiEhyT8bC2s/Njlu6ly9gtJZWSV46Q3ZjBL4q9sHKqZQ= +-----END CERTIFICATE----- diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/nullcert.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/nullcert.pem new file mode 100644 index 000000000..e69de29bb diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/pystone.py b/.install/.kodi/addons/script.module.future/libs/future/backports/test/pystone.py new file mode 100644 index 000000000..7652027b4 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/pystone.py @@ -0,0 +1,272 @@ +#!/usr/bin/env python3 + +""" +"PYSTONE" Benchmark Program + +Version: Python/1.1 (corresponds to C/1.1 plus 2 Pystone fixes) + +Author: Reinhold P. Weicker, CACM Vol 27, No 10, 10/84 pg. 1013. 
+ + Translated from ADA to C by Rick Richardson. + Every method to preserve ADA-likeness has been used, + at the expense of C-ness. + + Translated from C to Python by Guido van Rossum. + +Version History: + + Version 1.1 corrects two bugs in version 1.0: + + First, it leaked memory: in Proc1(), NextRecord ends + up having a pointer to itself. I have corrected this + by zapping NextRecord.PtrComp at the end of Proc1(). + + Second, Proc3() used the operator != to compare a + record to None. This is rather inefficient and not + true to the intention of the original benchmark (where + a pointer comparison to None is intended; the != + operator attempts to find a method __cmp__ to do value + comparison of the record). Version 1.1 runs 5-10 + percent faster than version 1.0, so benchmark figures + of different versions can't be compared directly. + +""" + +from __future__ import print_function + +from time import clock + +LOOPS = 50000 + +__version__ = "1.1" + +[Ident1, Ident2, Ident3, Ident4, Ident5] = range(1, 6) + +class Record(object): + + def __init__(self, PtrComp = None, Discr = 0, EnumComp = 0, + IntComp = 0, StringComp = 0): + self.PtrComp = PtrComp + self.Discr = Discr + self.EnumComp = EnumComp + self.IntComp = IntComp + self.StringComp = StringComp + + def copy(self): + return Record(self.PtrComp, self.Discr, self.EnumComp, + self.IntComp, self.StringComp) + +TRUE = 1 +FALSE = 0 + +def main(loops=LOOPS): + benchtime, stones = pystones(loops) + print("Pystone(%s) time for %d passes = %g" % \ + (__version__, loops, benchtime)) + print("This machine benchmarks at %g pystones/second" % stones) + + +def pystones(loops=LOOPS): + return Proc0(loops) + +IntGlob = 0 +BoolGlob = FALSE +Char1Glob = '\0' +Char2Glob = '\0' +Array1Glob = [0]*51 +Array2Glob = [x[:] for x in [Array1Glob]*51] +PtrGlb = None +PtrGlbNext = None + +def Proc0(loops=LOOPS): + global IntGlob + global BoolGlob + global Char1Glob + global Char2Glob + global Array1Glob + global Array2Glob + global 
PtrGlb + global PtrGlbNext + + starttime = clock() + for i in range(loops): + pass + nulltime = clock() - starttime + + PtrGlbNext = Record() + PtrGlb = Record() + PtrGlb.PtrComp = PtrGlbNext + PtrGlb.Discr = Ident1 + PtrGlb.EnumComp = Ident3 + PtrGlb.IntComp = 40 + PtrGlb.StringComp = "DHRYSTONE PROGRAM, SOME STRING" + String1Loc = "DHRYSTONE PROGRAM, 1'ST STRING" + Array2Glob[8][7] = 10 + + starttime = clock() + + for i in range(loops): + Proc5() + Proc4() + IntLoc1 = 2 + IntLoc2 = 3 + String2Loc = "DHRYSTONE PROGRAM, 2'ND STRING" + EnumLoc = Ident2 + BoolGlob = not Func2(String1Loc, String2Loc) + while IntLoc1 < IntLoc2: + IntLoc3 = 5 * IntLoc1 - IntLoc2 + IntLoc3 = Proc7(IntLoc1, IntLoc2) + IntLoc1 = IntLoc1 + 1 + Proc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3) + PtrGlb = Proc1(PtrGlb) + CharIndex = 'A' + while CharIndex <= Char2Glob: + if EnumLoc == Func1(CharIndex, 'C'): + EnumLoc = Proc6(Ident1) + CharIndex = chr(ord(CharIndex)+1) + IntLoc3 = IntLoc2 * IntLoc1 + IntLoc2 = IntLoc3 / IntLoc1 + IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1 + IntLoc1 = Proc2(IntLoc1) + + benchtime = clock() - starttime - nulltime + if benchtime == 0.0: + loopsPerBenchtime = 0.0 + else: + loopsPerBenchtime = (loops / benchtime) + return benchtime, loopsPerBenchtime + +def Proc1(PtrParIn): + PtrParIn.PtrComp = NextRecord = PtrGlb.copy() + PtrParIn.IntComp = 5 + NextRecord.IntComp = PtrParIn.IntComp + NextRecord.PtrComp = PtrParIn.PtrComp + NextRecord.PtrComp = Proc3(NextRecord.PtrComp) + if NextRecord.Discr == Ident1: + NextRecord.IntComp = 6 + NextRecord.EnumComp = Proc6(PtrParIn.EnumComp) + NextRecord.PtrComp = PtrGlb.PtrComp + NextRecord.IntComp = Proc7(NextRecord.IntComp, 10) + else: + PtrParIn = NextRecord.copy() + NextRecord.PtrComp = None + return PtrParIn + +def Proc2(IntParIO): + IntLoc = IntParIO + 10 + while 1: + if Char1Glob == 'A': + IntLoc = IntLoc - 1 + IntParIO = IntLoc - IntGlob + EnumLoc = Ident1 + if EnumLoc == Ident1: + break + return IntParIO + +def 
Proc3(PtrParOut): + global IntGlob + + if PtrGlb is not None: + PtrParOut = PtrGlb.PtrComp + else: + IntGlob = 100 + PtrGlb.IntComp = Proc7(10, IntGlob) + return PtrParOut + +def Proc4(): + global Char2Glob + + BoolLoc = Char1Glob == 'A' + BoolLoc = BoolLoc or BoolGlob + Char2Glob = 'B' + +def Proc5(): + global Char1Glob + global BoolGlob + + Char1Glob = 'A' + BoolGlob = FALSE + +def Proc6(EnumParIn): + EnumParOut = EnumParIn + if not Func3(EnumParIn): + EnumParOut = Ident4 + if EnumParIn == Ident1: + EnumParOut = Ident1 + elif EnumParIn == Ident2: + if IntGlob > 100: + EnumParOut = Ident1 + else: + EnumParOut = Ident4 + elif EnumParIn == Ident3: + EnumParOut = Ident2 + elif EnumParIn == Ident4: + pass + elif EnumParIn == Ident5: + EnumParOut = Ident3 + return EnumParOut + +def Proc7(IntParI1, IntParI2): + IntLoc = IntParI1 + 2 + IntParOut = IntParI2 + IntLoc + return IntParOut + +def Proc8(Array1Par, Array2Par, IntParI1, IntParI2): + global IntGlob + + IntLoc = IntParI1 + 5 + Array1Par[IntLoc] = IntParI2 + Array1Par[IntLoc+1] = Array1Par[IntLoc] + Array1Par[IntLoc+30] = IntLoc + for IntIndex in range(IntLoc, IntLoc+2): + Array2Par[IntLoc][IntIndex] = IntLoc + Array2Par[IntLoc][IntLoc-1] = Array2Par[IntLoc][IntLoc-1] + 1 + Array2Par[IntLoc+20][IntLoc] = Array1Par[IntLoc] + IntGlob = 5 + +def Func1(CharPar1, CharPar2): + CharLoc1 = CharPar1 + CharLoc2 = CharLoc1 + if CharLoc2 != CharPar2: + return Ident1 + else: + return Ident2 + +def Func2(StrParI1, StrParI2): + IntLoc = 1 + while IntLoc <= 1: + if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1: + CharLoc = 'A' + IntLoc = IntLoc + 1 + if CharLoc >= 'W' and CharLoc <= 'Z': + IntLoc = 7 + if CharLoc == 'X': + return TRUE + else: + if StrParI1 > StrParI2: + IntLoc = IntLoc + 7 + return TRUE + else: + return FALSE + +def Func3(EnumParIn): + EnumLoc = EnumParIn + if EnumLoc == Ident3: return TRUE + return FALSE + +if __name__ == '__main__': + import sys + def error(msg): + print(msg, end=' ', file=sys.stderr) + 
print("usage: %s [number_of_loops]" % sys.argv[0], file=sys.stderr) + sys.exit(100) + nargs = len(sys.argv) - 1 + if nargs > 1: + error("%d arguments are too many;" % nargs) + elif nargs == 1: + try: loops = int(sys.argv[1]) + except ValueError: + error("Invalid argument %r;" % sys.argv[1]) + else: + loops = LOOPS + main(loops) diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/sha256.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/sha256.pem new file mode 100644 index 000000000..d3db4b85c --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/sha256.pem @@ -0,0 +1,128 @@ +# Certificate chain for https://sha256.tbs-internet.com + 0 s:/C=FR/postalCode=14000/ST=Calvados/L=CAEN/street=22 rue de Bretagne/O=TBS INTERNET/OU=0002 440443810/OU=sha-256 production/CN=sha256.tbs-internet.com + i:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA SGC +-----BEGIN CERTIFICATE----- +MIIGXDCCBUSgAwIBAgIRAKpVmHgg9nfCodAVwcP4siwwDQYJKoZIhvcNAQELBQAw +gcQxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl +bjEVMBMGA1UEChMMVEJTIElOVEVSTkVUMUgwRgYDVQQLEz9UZXJtcyBhbmQgQ29u +ZGl0aW9uczogaHR0cDovL3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL3JlcG9zaXRv +cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEYMBYGA1UEAxMPVEJTIFg1MDkg +Q0EgU0dDMB4XDTEyMDEwNDAwMDAwMFoXDTE0MDIxNzIzNTk1OVowgcsxCzAJBgNV +BAYTAkZSMQ4wDAYDVQQREwUxNDAwMDERMA8GA1UECBMIQ2FsdmFkb3MxDTALBgNV +BAcTBENBRU4xGzAZBgNVBAkTEjIyIHJ1ZSBkZSBCcmV0YWduZTEVMBMGA1UEChMM +VEJTIElOVEVSTkVUMRcwFQYDVQQLEw4wMDAyIDQ0MDQ0MzgxMDEbMBkGA1UECxMS +c2hhLTI1NiBwcm9kdWN0aW9uMSAwHgYDVQQDExdzaGEyNTYudGJzLWludGVybmV0 +LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKQIX/zdJcyxty0m +PM1XQSoSSifueS3AVcgqMsaIKS/u+rYzsv4hQ/qA6vLn5m5/ewUcZDj7zdi6rBVf +PaVNXJ6YinLX0tkaW8TEjeVuZG5yksGZlhCt1CJ1Ho9XLiLaP4uJ7MCoNUntpJ+E +LfrOdgsIj91kPmwjDJeztVcQCvKzhjVJA/KxdInc0JvOATn7rpaSmQI5bvIjufgo 
+qVsTPwVFzuUYULXBk7KxRT7MiEqnd5HvviNh0285QC478zl3v0I0Fb5El4yD3p49 +IthcRnxzMKc0UhU5ogi0SbONyBfm/mzONVfSxpM+MlyvZmJqrbuuLoEDzJD+t8PU +xSuzgbcCAwEAAaOCAj4wggI6MB8GA1UdIwQYMBaAFAdEdoWTKLx/bXjSCuv6TEvf +2YIfMB0GA1UdDgQWBBT/qTGYdaj+f61c2IRFL/B1eEsM8DAOBgNVHQ8BAf8EBAMC +BaAwDAYDVR0TAQH/BAIwADA0BgNVHSUELTArBggrBgEFBQcDAQYIKwYBBQUHAwIG +CisGAQQBgjcKAwMGCWCGSAGG+EIEATBLBgNVHSAERDBCMEAGCisGAQQB5TcCBAEw +MjAwBggrBgEFBQcCARYkaHR0cHM6Ly93d3cudGJzLWludGVybmV0LmNvbS9DQS9D +UFM0MG0GA1UdHwRmMGQwMqAwoC6GLGh0dHA6Ly9jcmwudGJzLWludGVybmV0LmNv +bS9UQlNYNTA5Q0FTR0MuY3JsMC6gLKAqhihodHRwOi8vY3JsLnRicy14NTA5LmNv +bS9UQlNYNTA5Q0FTR0MuY3JsMIGmBggrBgEFBQcBAQSBmTCBljA4BggrBgEFBQcw +AoYsaHR0cDovL2NydC50YnMtaW50ZXJuZXQuY29tL1RCU1g1MDlDQVNHQy5jcnQw +NAYIKwYBBQUHMAKGKGh0dHA6Ly9jcnQudGJzLXg1MDkuY29tL1RCU1g1MDlDQVNH +Qy5jcnQwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLnRicy14NTA5LmNvbTA/BgNV +HREEODA2ghdzaGEyNTYudGJzLWludGVybmV0LmNvbYIbd3d3LnNoYTI1Ni50YnMt +aW50ZXJuZXQuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQA0pOuL8QvAa5yksTbGShzX +ABApagunUGoEydv4YJT1MXy9tTp7DrWaozZSlsqBxrYAXP1d9r2fuKbEniYHxaQ0 +UYaf1VSIlDo1yuC8wE7wxbHDIpQ/E5KAyxiaJ8obtDhFstWAPAH+UoGXq0kj2teN +21sFQ5dXgA95nldvVFsFhrRUNB6xXAcaj0VZFhttI0ZfQZmQwEI/P+N9Jr40OGun +aa+Dn0TMeUH4U20YntfLbu2nDcJcYfyurm+8/0Tr4HznLnedXu9pCPYj0TaddrgT +XO0oFiyy7qGaY6+qKh71yD64Y3ycCJ/HR9Wm39mjZYc9ezYwT4noP6r7Lk8YO7/q +-----END CERTIFICATE----- + 1 s:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA SGC + i:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQXpDZ0ETJMV02WTx3GTnhhTANBgkqhkiG9w0BAQUFADBv +MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFk +ZFRydXN0IEV4dGVybmFsIFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBF +eHRlcm5hbCBDQSBSb290MB4XDTA1MTIwMTAwMDAwMFoXDTE5MDYyNDE5MDYzMFow +gcQxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl 
+bjEVMBMGA1UEChMMVEJTIElOVEVSTkVUMUgwRgYDVQQLEz9UZXJtcyBhbmQgQ29u +ZGl0aW9uczogaHR0cDovL3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL3JlcG9zaXRv +cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEYMBYGA1UEAxMPVEJTIFg1MDkg +Q0EgU0dDMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsgOkO3f7wzN6 +rOjg45tR5vjBfzK7qmV9IBxb/QW9EEXxG+E7FNhZqQLtwGBKoSsHTnQqV75wWMk0 +9tinWvftBkSpj5sTi/8cbzJfUvTSVYh3Qxv6AVVjMMH/ruLjE6y+4PoaPs8WoYAQ +ts5R4Z1g8c/WnTepLst2x0/Wv7GmuoQi+gXvHU6YrBiu7XkeYhzc95QdviWSJRDk +owhb5K43qhcvjRmBfO/paGlCliDGZp8mHwrI21mwobWpVjTxZRwYO3bd4+TGcI4G +Ie5wmHwE8F7SK1tgSqbBacKjDa93j7txKkfz/Yd2n7TGqOXiHPsJpG655vrKtnXk +9vs1zoDeJQIDAQABo4IBljCCAZIwHQYDVR0OBBYEFAdEdoWTKLx/bXjSCuv6TEvf +2YIfMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMCAGA1UdJQQZ +MBcGCisGAQQBgjcKAwMGCWCGSAGG+EIEATAYBgNVHSAEETAPMA0GCysGAQQBgOU3 +AgQBMHsGA1UdHwR0MHIwOKA2oDSGMmh0dHA6Ly9jcmwuY29tb2RvY2EuY29tL0Fk +ZFRydXN0RXh0ZXJuYWxDQVJvb3QuY3JsMDagNKAyhjBodHRwOi8vY3JsLmNvbW9k +by5uZXQvQWRkVHJ1c3RFeHRlcm5hbENBUm9vdC5jcmwwgYAGCCsGAQUFBwEBBHQw +cjA4BggrBgEFBQcwAoYsaHR0cDovL2NydC5jb21vZG9jYS5jb20vQWRkVHJ1c3RV +VE5TR0NDQS5jcnQwNgYIKwYBBQUHMAKGKmh0dHA6Ly9jcnQuY29tb2RvLm5ldC9B +ZGRUcnVzdFVUTlNHQ0NBLmNydDARBglghkgBhvhCAQEEBAMCAgQwDQYJKoZIhvcN +AQEFBQADggEBAK2zEzs+jcIrVK9oDkdDZNvhuBYTdCfpxfFs+OAujW0bIfJAy232 +euVsnJm6u/+OrqKudD2tad2BbejLLXhMZViaCmK7D9nrXHx4te5EP8rL19SUVqLY +1pTnv5dhNgEgvA7n5lIzDSYs7yRLsr7HJsYPr6SeYSuZizyX1SNz7ooJ32/F3X98 +RB0Mlc/E0OyOrkQ9/y5IrnpnaSora8CnUrV5XNOg+kyCz9edCyx4D5wXYcwZPVWz +8aDqquESrezPyjtfi4WRO4s/VD3HLZvOxzMrWAVYCDG9FxaOhF0QGuuG1F7F3GKV +v6prNyCl016kRl2j1UT+a7gLd8fA25A4C9E= +-----END CERTIFICATE----- + 2 s:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root + i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC +-----BEGIN CERTIFICATE----- +MIIEZjCCA06gAwIBAgIQUSYKkxzif5zDpV954HKugjANBgkqhkiG9w0BAQUFADCB +kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug 
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw +IFNHQzAeFw0wNTA2MDcwODA5MTBaFw0xOTA2MjQxOTA2MzBaMG8xCzAJBgNVBAYT +AlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0 +ZXJuYWwgVFRQIE5ldHdvcmsxIjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENB +IFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC39xoz5vIABC05 +4E5b7R+8bA/Ntfojts7emxEzl6QpTH2Tn71KvJPtAxrjj8/lbVBa1pcplFqAsEl6 +2y6V/bjKvzc4LR4+kUGtcFbH8E8/6DKedMrIkFTpxl8PeJ2aQDwOrGGqXhSPnoeh +alDc15pOrwWzpnGUnHGzUGAKxxOdOAeGAqjpqGkmGJCrTLBPI6s6T4TY386f4Wlv +u9dC12tE5Met7m1BX3JacQg3s3llpFmglDf3AC8NwpJy2tA4ctsUqEXEXSp9t7TW +xO6szRNEt8kr3UMAJfphuWlqWCMRt6czj1Z1WfXNKddGtworZbbTQm8Vsrh7++/p +XVPVNFonAgMBAAGjgdgwgdUwHwYDVR0jBBgwFoAUUzLRs89/+uDxoF2FTpLSnkUd +tE8wHQYDVR0OBBYEFK29mHo0tCb3+sQmVO8DveAky1QaMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MBEGCWCGSAGG+EIBAQQEAwIBAjAgBgNVHSUEGTAX +BgorBgEEAYI3CgMDBglghkgBhvhCBAEwPQYDVR0fBDYwNDAyoDCgLoYsaHR0cDov +L2NybC51c2VydHJ1c3QuY29tL1VUTi1EQVRBQ29ycFNHQy5jcmwwDQYJKoZIhvcN +AQEFBQADggEBAMbuUxdoFLJRIh6QWA2U/b3xcOWGLcM2MY9USEbnLQg3vGwKYOEO +rVE04BKT6b64q7gmtOmWPSiPrmQH/uAB7MXjkesYoPF1ftsK5p+R26+udd8jkWjd +FwBaS/9kbHDrARrQkNnHptZt9hPk/7XJ0h4qy7ElQyZ42TCbTg0evmnv3+r+LbPM ++bDdtRTKkdSytaX7ARmjR3mfnYyVhzT4HziS2jamEfpr62vp3EV4FTkG101B5CHI +3C+H0be/SGB1pWLLJN47YaApIKa+xWycxOkKaSLvkTr6Jq/RW0GnOuL4OAdCq8Fb ++M5tug8EPzI0rNwEKNdwMBQmBsTkm5jVz3g= +-----END CERTIFICATE----- + 3 s:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC + i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC +-----BEGIN CERTIFICATE----- +MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB +kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug +Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw 
+IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG +EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD +VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu +dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6 +E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ +D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK +4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq +lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW +bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB +o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT +MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js +LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr +BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB +AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft +Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj +j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH +KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv +2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3 +mfnGV/TJVTl4uix5yaaIK/QI +-----END CERTIFICATE----- diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/ssl_cert.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/ssl_cert.pem new file mode 100644 index 000000000..47a7d7e37 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/ssl_cert.pem @@ -0,0 +1,15 @@ +-----BEGIN CERTIFICATE----- +MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV +BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw +MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH 
+Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k +YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw +gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 +6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt +pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw +FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd +BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G +lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 +CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX +-----END CERTIFICATE----- diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/ssl_key.passwd.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/ssl_key.passwd.pem new file mode 100644 index 000000000..2524672e7 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/ssl_key.passwd.pem @@ -0,0 +1,18 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,1A8D9D2A02EC698A + +kJYbfZ8L0sfe9Oty3gw0aloNnY5E8fegRfQLZlNoxTl6jNt0nIwI8kDJ36CZgR9c +u3FDJm/KqrfUoz8vW+qEnWhSG7QPX2wWGPHd4K94Yz/FgrRzZ0DoK7XxXq9gOtVA +AVGQhnz32p+6WhfGsCr9ArXEwRZrTk/FvzEPaU5fHcoSkrNVAGX8IpSVkSDwEDQr +Gv17+cfk99UV1OCza6yKHoFkTtrC+PZU71LomBabivS2Oc4B9hYuSR2hF01wTHP+ +YlWNagZOOVtNz4oKK9x9eNQpmfQXQvPPTfusexKIbKfZrMvJoxcm1gfcZ0H/wK6P +6wmXSG35qMOOztCZNtperjs1wzEBXznyK8QmLcAJBjkfarABJX9vBEzZV0OUKhy+ +noORFwHTllphbmydLhu6ehLUZMHPhzAS5UN7srtpSN81eerDMy0RMUAwA7/PofX1 +94Me85Q8jP0PC9ETdsJcPqLzAPETEYu0ELewKRcrdyWi+tlLFrpE5KT/s5ecbl9l +7B61U4Kfd1PIXc/siINhU3A3bYK+845YyUArUOnKf1kEox7p1RpD7yFqVT04lRTo +cibNKATBusXSuBrp2G6GNuhWEOSafWCKJQAzgCYIp6ZTV2khhMUGppc/2H3CF6cO +zX0KtlPVZC7hLkB6HT8SxYUwF1zqWY7+/XPPdc37MeEZ87Q3UuZwqORLY+Z0hpgt +L5JXBCoklZhCAaN2GqwFLXtGiRSRFGY7xXIhbDTlE65Wv1WGGgDLMKGE1gOz3yAo +2jjG1+yAHJUdE69XTFHSqSkvaloA1W03LdMXZ9VuQJ/ySXCie6ABAQ== +-----END RSA PRIVATE KEY----- diff --git 
a/.install/.kodi/addons/script.module.future/libs/future/backports/test/ssl_key.pem b/.install/.kodi/addons/script.module.future/libs/future/backports/test/ssl_key.pem new file mode 100644 index 000000000..3fd3bbd54 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/ssl_key.pem @@ -0,0 +1,16 @@ +-----BEGIN PRIVATE KEY----- +MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBANtb0+YrKuxevGpm +LrjaUhZSgz6zFAmuGFmKmUbdjmfv9zSmmdsQIksK++jK0Be9LeZy20j6ahOfuVa0 +ufEmPoP7Fy4hXegKZR9cCWcIe/A6H2xWF1IIJLRTLaU8ol/I7T+um5HD5AwAwNPP +USNU0Eegmvp+xxWu3NX2m1Veot85AgMBAAECgYA3ZdZ673X0oexFlq7AAmrutkHt +CL7LvwrpOiaBjhyTxTeSNWzvtQBkIU8DOI0bIazA4UreAFffwtvEuPmonDb3F+Iq +SMAu42XcGyVZEl+gHlTPU9XRX7nTOXVt+MlRRRxL6t9GkGfUAXI3XxJDXW3c0vBK +UL9xqD8cORXOfE06rQJBAP8mEX1ERkR64Ptsoe4281vjTlNfIbs7NMPkUnrn9N/Y +BLhjNIfQ3HFZG8BTMLfX7kCS9D593DW5tV4Z9BP/c6cCQQDcFzCcVArNh2JSywOQ +ZfTfRbJg/Z5Lt9Fkngv1meeGNPgIMLN8Sg679pAOOWmzdMO3V706rNPzSVMME7E5 +oPIfAkEA8pDddarP5tCvTTgUpmTFbakm0KoTZm2+FzHcnA4jRh+XNTjTOv98Y6Ik +eO5d1ZnKXseWvkZncQgxfdnMqqpj5wJAcNq/RVne1DbYlwWchT2Si65MYmmJ8t+F +0mcsULqjOnEMwf5e+ptq5LzwbyrHZYq5FNk7ocufPv/ZQrcSSC+cFwJBAKvOJByS +x56qyGeZLOQlWS2JS3KJo59XuLFGqcbgN9Om9xFa41Yb4N9NvplFivsvZdw3m1Q/ +SPIXQuT8RMPDVNQ= +-----END PRIVATE KEY----- diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/ssl_servers.py b/.install/.kodi/addons/script.module.future/libs/future/backports/test/ssl_servers.py new file mode 100644 index 000000000..87a3fb855 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/ssl_servers.py @@ -0,0 +1,207 @@ +from __future__ import absolute_import, division, print_function, unicode_literals +from future.builtins import filter, str +from future import utils +import os +import sys +import ssl +import pprint +import socket +from future.backports.urllib import parse as urllib_parse +from future.backports.http.server import (HTTPServer as _HTTPServer, + SimpleHTTPRequestHandler, 
BaseHTTPRequestHandler) +from future.backports.test import support +threading = support.import_module("threading") + +here = os.path.dirname(__file__) + +HOST = support.HOST +CERTFILE = os.path.join(here, 'keycert.pem') + +# This one's based on HTTPServer, which is based on SocketServer + +class HTTPSServer(_HTTPServer): + + def __init__(self, server_address, handler_class, context): + _HTTPServer.__init__(self, server_address, handler_class) + self.context = context + + def __str__(self): + return ('<%s %s:%s>' % + (self.__class__.__name__, + self.server_name, + self.server_port)) + + def get_request(self): + # override this to wrap socket with SSL + try: + sock, addr = self.socket.accept() + sslconn = self.context.wrap_socket(sock, server_side=True) + except socket.error as e: + # socket errors are silenced by the caller, print them here + if support.verbose: + sys.stderr.write("Got an error:\n%s\n" % e) + raise + return sslconn, addr + +class RootedHTTPRequestHandler(SimpleHTTPRequestHandler): + # need to override translate_path to get a known root, + # instead of using os.curdir, since the test could be + # run from anywhere + + server_version = "TestHTTPS/1.0" + root = here + # Avoid hanging when a request gets interrupted by the client + timeout = 5 + + def translate_path(self, path): + """Translate a /-separated PATH to the local filename syntax. + + Components that mean special things to the local file system + (e.g. drive or directory names) are ignored. (XXX They should + probably be diagnosed.) 
+ + """ + # abandon query parameters + path = urllib.parse.urlparse(path)[2] + path = os.path.normpath(urllib.parse.unquote(path)) + words = path.split('/') + words = filter(None, words) + path = self.root + for word in words: + drive, word = os.path.splitdrive(word) + head, word = os.path.split(word) + path = os.path.join(path, word) + return path + + def log_message(self, format, *args): + # we override this to suppress logging unless "verbose" + if support.verbose: + sys.stdout.write(" server (%s:%d %s):\n [%s] %s\n" % + (self.server.server_address, + self.server.server_port, + self.request.cipher(), + self.log_date_time_string(), + format%args)) + + +class StatsRequestHandler(BaseHTTPRequestHandler): + """Example HTTP request handler which returns SSL statistics on GET + requests. + """ + + server_version = "StatsHTTPS/1.0" + + def do_GET(self, send_body=True): + """Serve a GET request.""" + sock = self.rfile.raw._sock + context = sock.context + stats = { + 'session_cache': context.session_stats(), + 'cipher': sock.cipher(), + 'compression': sock.compression(), + } + body = pprint.pformat(stats) + body = body.encode('utf-8') + self.send_response(200) + self.send_header("Content-type", "text/plain; charset=utf-8") + self.send_header("Content-Length", str(len(body))) + self.end_headers() + if send_body: + self.wfile.write(body) + + def do_HEAD(self): + """Serve a HEAD request.""" + self.do_GET(send_body=False) + + def log_request(self, format, *args): + if support.verbose: + BaseHTTPRequestHandler.log_request(self, format, *args) + + +class HTTPSServerThread(threading.Thread): + + def __init__(self, context, host=HOST, handler_class=None): + self.flag = None + self.server = HTTPSServer((host, 0), + handler_class or RootedHTTPRequestHandler, + context) + self.port = self.server.server_port + threading.Thread.__init__(self) + self.daemon = True + + def __str__(self): + return "<%s %s>" % (self.__class__.__name__, self.server) + + def start(self, flag=None): + 
self.flag = flag + threading.Thread.start(self) + + def run(self): + if self.flag: + self.flag.set() + try: + self.server.serve_forever(0.05) + finally: + self.server.server_close() + + def stop(self): + self.server.shutdown() + + +def make_https_server(case, certfile=CERTFILE, host=HOST, handler_class=None): + # we assume the certfile contains both private key and certificate + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + context.load_cert_chain(certfile) + server = HTTPSServerThread(context, host, handler_class) + flag = threading.Event() + server.start(flag) + flag.wait() + def cleanup(): + if support.verbose: + sys.stdout.write('stopping HTTPS server\n') + server.stop() + if support.verbose: + sys.stdout.write('joining HTTPS thread\n') + server.join() + case.addCleanup(cleanup) + return server + + +if __name__ == "__main__": + import argparse + parser = argparse.ArgumentParser( + description='Run a test HTTPS server. ' + 'By default, the current directory is served.') + parser.add_argument('-p', '--port', type=int, default=4433, + help='port to listen on (default: %(default)s)') + parser.add_argument('-q', '--quiet', dest='verbose', default=True, + action='store_false', help='be less verbose') + parser.add_argument('-s', '--stats', dest='use_stats_handler', default=False, + action='store_true', help='always return stats page') + parser.add_argument('--curve-name', dest='curve_name', type=str, + action='store', + help='curve name for EC-based Diffie-Hellman') + parser.add_argument('--dh', dest='dh_file', type=str, action='store', + help='PEM file containing DH parameters') + args = parser.parse_args() + + support.verbose = args.verbose + if args.use_stats_handler: + handler_class = StatsRequestHandler + else: + handler_class = RootedHTTPRequestHandler + if utils.PY2: + handler_class.root = os.getcwdu() + else: + handler_class.root = os.getcwd() + context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + context.load_cert_chain(CERTFILE) + if args.curve_name: + 
context.set_ecdh_curve(args.curve_name) + if args.dh_file: + context.load_dh_params(args.dh_file) + + server = HTTPSServer(("", args.port), handler_class, context) + if args.verbose: + print("Listening on https://localhost:{0.port}".format(args)) + server.serve_forever(0.1) diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/test/support.py b/.install/.kodi/addons/script.module.future/libs/future/backports/test/support.py new file mode 100644 index 000000000..1999e208f --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/test/support.py @@ -0,0 +1,2048 @@ +# -*- coding: utf-8 -*- +"""Supporting definitions for the Python regression tests. + +Backported for python-future from Python 3.3 test/support.py. +""" + +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future import utils +from future.builtins import str, range, open, int, map, list + +import contextlib +import errno +import functools +import gc +import socket +import sys +import os +import platform +import shutil +import warnings +import unittest +# For Python 2.6 compatibility: +if not hasattr(unittest, 'skip'): + import unittest2 as unittest + +import importlib +# import collections.abc # not present on Py2.7 +import re +import subprocess +import imp +import time +try: + import sysconfig +except ImportError: + # sysconfig is not available on Python 2.6. 
Try using distutils.sysconfig instead: + from distutils import sysconfig +import fnmatch +import logging.handlers +import struct +import tempfile + +try: + if utils.PY3: + import _thread, threading + else: + import thread as _thread, threading +except ImportError: + _thread = None + threading = None +try: + import multiprocessing.process +except ImportError: + multiprocessing = None + +try: + import zlib +except ImportError: + zlib = None + +try: + import gzip +except ImportError: + gzip = None + +try: + import bz2 +except ImportError: + bz2 = None + +try: + import lzma +except ImportError: + lzma = None + +__all__ = [ + "Error", "TestFailed", "ResourceDenied", "import_module", "verbose", + "use_resources", "max_memuse", "record_original_stdout", + "get_original_stdout", "unload", "unlink", "rmtree", "forget", + "is_resource_enabled", "requires", "requires_freebsd_version", + "requires_linux_version", "requires_mac_ver", "find_unused_port", + "bind_port", "IPV6_ENABLED", "is_jython", "TESTFN", "HOST", "SAVEDCWD", + "temp_cwd", "findfile", "create_empty_file", "sortdict", + "check_syntax_error", "open_urlresource", "check_warnings", "CleanImport", + "EnvironmentVarGuard", "TransientResource", "captured_stdout", + "captured_stdin", "captured_stderr", "time_out", "socket_peer_reset", + "ioerror_peer_reset", "run_with_locale", 'temp_umask', + "transient_internet", "set_memlimit", "bigmemtest", "bigaddrspacetest", + "BasicTestRunner", "run_unittest", "run_doctest", "threading_setup", + "threading_cleanup", "reap_children", "cpython_only", "check_impl_detail", + "get_attribute", "swap_item", "swap_attr", "requires_IEEE_754", + "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink", + "skip_unless_xattr", "import_fresh_module", "requires_zlib", + "PIPE_MAX_SIZE", "failfast", "anticipate_failure", "run_with_tz", + "requires_gzip", "requires_bz2", "requires_lzma", "suppress_crash_popup", + ] + +class Error(Exception): + """Base class for regression test 
exceptions.""" + +class TestFailed(Error): + """Test failed.""" + +class ResourceDenied(unittest.SkipTest): + """Test skipped because it requested a disallowed resource. + + This is raised when a test calls requires() for a resource that + has not be enabled. It is used to distinguish between expected + and unexpected skips. + """ + +@contextlib.contextmanager +def _ignore_deprecated_imports(ignore=True): + """Context manager to suppress package and module deprecation + warnings when importing them. + + If ignore is False, this context manager has no effect.""" + if ignore: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", ".+ (module|package)", + DeprecationWarning) + yield + else: + yield + + +def import_module(name, deprecated=False): + """Import and return the module to be tested, raising SkipTest if + it is not available. + + If deprecated is True, any module or package deprecation messages + will be suppressed.""" + with _ignore_deprecated_imports(deprecated): + try: + return importlib.import_module(name) + except ImportError as msg: + raise unittest.SkipTest(str(msg)) + + +def _save_and_remove_module(name, orig_modules): + """Helper function to save and remove a module from sys.modules + + Raise ImportError if the module can't be imported. + """ + # try to import the module and raise an error if it can't be imported + if name not in sys.modules: + __import__(name) + del sys.modules[name] + for modname in list(sys.modules): + if modname == name or modname.startswith(name + '.'): + orig_modules[modname] = sys.modules[modname] + del sys.modules[modname] + +def _save_and_block_module(name, orig_modules): + """Helper function to save and block a module in sys.modules + + Return True if the module was in sys.modules, False otherwise. 
+ """ + saved = True + try: + orig_modules[name] = sys.modules[name] + except KeyError: + saved = False + sys.modules[name] = None + return saved + + +def anticipate_failure(condition): + """Decorator to mark a test that is known to be broken in some cases + + Any use of this decorator should have a comment identifying the + associated tracker issue. + """ + if condition: + return unittest.expectedFailure + return lambda f: f + + +def import_fresh_module(name, fresh=(), blocked=(), deprecated=False): + """Import and return a module, deliberately bypassing sys.modules. + This function imports and returns a fresh copy of the named Python module + by removing the named module from sys.modules before doing the import. + Note that unlike reload, the original module is not affected by + this operation. + + *fresh* is an iterable of additional module names that are also removed + from the sys.modules cache before doing the import. + + *blocked* is an iterable of module names that are replaced with None + in the module cache during the import to ensure that attempts to import + them raise ImportError. + + The named module and any modules named in the *fresh* and *blocked* + parameters are saved before starting the import and then reinserted into + sys.modules when the fresh import is complete. + + Module and package deprecation messages are suppressed during this import + if *deprecated* is True. + + This function will raise ImportError if the named module cannot be + imported. + + If deprecated is True, any module or package deprecation messages + will be suppressed. 
+ """ + # NOTE: test_heapq, test_json and test_warnings include extra sanity checks + # to make sure that this utility function is working as expected + with _ignore_deprecated_imports(deprecated): + # Keep track of modules saved for later restoration as well + # as those which just need a blocking entry removed + orig_modules = {} + names_to_remove = [] + _save_and_remove_module(name, orig_modules) + try: + for fresh_name in fresh: + _save_and_remove_module(fresh_name, orig_modules) + for blocked_name in blocked: + if not _save_and_block_module(blocked_name, orig_modules): + names_to_remove.append(blocked_name) + fresh_module = importlib.import_module(name) + except ImportError: + fresh_module = None + finally: + for orig_name, module in orig_modules.items(): + sys.modules[orig_name] = module + for name_to_remove in names_to_remove: + del sys.modules[name_to_remove] + return fresh_module + + +def get_attribute(obj, name): + """Get an attribute, raising SkipTest if AttributeError is raised.""" + try: + attribute = getattr(obj, name) + except AttributeError: + raise unittest.SkipTest("object %r has no attribute %r" % (obj, name)) + else: + return attribute + +verbose = 1 # Flag set to 0 by regrtest.py +use_resources = None # Flag set to [] by regrtest.py +max_memuse = 0 # Disable bigmem tests (they will still be run with + # small sizes, to make sure they work.) +real_max_memuse = 0 +failfast = False +match_tests = None + +# _original_stdout is meant to hold stdout at the time regrtest began. +# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever. +# The point is to have some flavor of stdout the user can actually see. 
+_original_stdout = None +def record_original_stdout(stdout): + global _original_stdout + _original_stdout = stdout + +def get_original_stdout(): + return _original_stdout or sys.stdout + +def unload(name): + try: + del sys.modules[name] + except KeyError: + pass + +if sys.platform.startswith("win"): + def _waitfor(func, pathname, waitall=False): + # Perform the operation + func(pathname) + # Now setup the wait loop + if waitall: + dirname = pathname + else: + dirname, name = os.path.split(pathname) + dirname = dirname or '.' + # Check for `pathname` to be removed from the filesystem. + # The exponential backoff of the timeout amounts to a total + # of ~1 second after which the deletion is probably an error + # anyway. + # Testing on a i7@4.3GHz shows that usually only 1 iteration is + # required when contention occurs. + timeout = 0.001 + while timeout < 1.0: + # Note we are only testing for the existence of the file(s) in + # the contents of the directory regardless of any security or + # access rights. If we have made it this far, we have sufficient + # permissions to do that much using Python's equivalent of the + # Windows API FindFirstFile. + # Other Windows APIs can fail or give incorrect results when + # dealing with files that are pending deletion. 
+ L = os.listdir(dirname) + if not (L if waitall else name in L): + return + # Increase the timeout and try again + time.sleep(timeout) + timeout *= 2 + warnings.warn('tests may fail, delete still pending for ' + pathname, + RuntimeWarning, stacklevel=4) + + def _unlink(filename): + _waitfor(os.unlink, filename) + + def _rmdir(dirname): + _waitfor(os.rmdir, dirname) + + def _rmtree(path): + def _rmtree_inner(path): + for name in os.listdir(path): + fullname = os.path.join(path, name) + if os.path.isdir(fullname): + _waitfor(_rmtree_inner, fullname, waitall=True) + os.rmdir(fullname) + else: + os.unlink(fullname) + _waitfor(_rmtree_inner, path, waitall=True) + _waitfor(os.rmdir, path) +else: + _unlink = os.unlink + _rmdir = os.rmdir + _rmtree = shutil.rmtree + +def unlink(filename): + try: + _unlink(filename) + except OSError as error: + # The filename need not exist. + if error.errno not in (errno.ENOENT, errno.ENOTDIR): + raise + +def rmdir(dirname): + try: + _rmdir(dirname) + except OSError as error: + # The directory need not exist. + if error.errno != errno.ENOENT: + raise + +def rmtree(path): + try: + _rmtree(path) + except OSError as error: + if error.errno != errno.ENOENT: + raise + +def make_legacy_pyc(source): + """Move a PEP 3147 pyc/pyo file to its legacy pyc/pyo location. + + The choice of .pyc or .pyo extension is done based on the __debug__ flag + value. + + :param source: The file system path to the source file. The source file + does not need to exist, however the PEP 3147 pyc file must exist. + :return: The file system path to the legacy pyc file. + """ + pyc_file = imp.cache_from_source(source) + up_one = os.path.dirname(os.path.abspath(source)) + legacy_pyc = os.path.join(up_one, source + ('c' if __debug__ else 'o')) + os.rename(pyc_file, legacy_pyc) + return legacy_pyc + +def forget(modname): + """'Forget' a module was ever imported. + + This removes the module from sys.modules and deletes any PEP 3147 or + legacy .pyc and .pyo files. 
+ """ + unload(modname) + for dirname in sys.path: + source = os.path.join(dirname, modname + '.py') + # It doesn't matter if they exist or not, unlink all possible + # combinations of PEP 3147 and legacy pyc and pyo files. + unlink(source + 'c') + unlink(source + 'o') + unlink(imp.cache_from_source(source, debug_override=True)) + unlink(imp.cache_from_source(source, debug_override=False)) + +# On some platforms, should not run gui test even if it is allowed +# in `use_resources'. +if sys.platform.startswith('win'): + import ctypes + import ctypes.wintypes + def _is_gui_available(): + UOI_FLAGS = 1 + WSF_VISIBLE = 0x0001 + class USEROBJECTFLAGS(ctypes.Structure): + _fields_ = [("fInherit", ctypes.wintypes.BOOL), + ("fReserved", ctypes.wintypes.BOOL), + ("dwFlags", ctypes.wintypes.DWORD)] + dll = ctypes.windll.user32 + h = dll.GetProcessWindowStation() + if not h: + raise ctypes.WinError() + uof = USEROBJECTFLAGS() + needed = ctypes.wintypes.DWORD() + res = dll.GetUserObjectInformationW(h, + UOI_FLAGS, + ctypes.byref(uof), + ctypes.sizeof(uof), + ctypes.byref(needed)) + if not res: + raise ctypes.WinError() + return bool(uof.dwFlags & WSF_VISIBLE) +else: + def _is_gui_available(): + return True + +def is_resource_enabled(resource): + """Test whether a resource is enabled. Known resources are set by + regrtest.py.""" + return use_resources is not None and resource in use_resources + +def requires(resource, msg=None): + """Raise ResourceDenied if the specified resource is not available. + + If the caller's module is __main__ then automatically return True. The + possibility of False being returned occurs when regrtest.py is + executing. 
+ """ + if resource == 'gui' and not _is_gui_available(): + raise unittest.SkipTest("Cannot use the 'gui' resource") + # see if the caller's module is __main__ - if so, treat as if + # the resource was set + if sys._getframe(1).f_globals.get("__name__") == "__main__": + return + if not is_resource_enabled(resource): + if msg is None: + msg = "Use of the %r resource not enabled" % resource + raise ResourceDenied(msg) + +def _requires_unix_version(sysname, min_version): + """Decorator raising SkipTest if the OS is `sysname` and the version is less + than `min_version`. + + For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if + the FreeBSD version is less than 7.2. + """ + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kw): + if platform.system() == sysname: + version_txt = platform.release().split('-', 1)[0] + try: + version = tuple(map(int, version_txt.split('.'))) + except ValueError: + pass + else: + if version < min_version: + min_version_txt = '.'.join(map(str, min_version)) + raise unittest.SkipTest( + "%s version %s or higher required, not %s" + % (sysname, min_version_txt, version_txt)) + return func(*args, **kw) + wrapper.min_version = min_version + return wrapper + return decorator + +def requires_freebsd_version(*min_version): + """Decorator raising SkipTest if the OS is FreeBSD and the FreeBSD version is + less than `min_version`. + + For example, @requires_freebsd_version(7, 2) raises SkipTest if the FreeBSD + version is less than 7.2. + """ + return _requires_unix_version('FreeBSD', min_version) + +def requires_linux_version(*min_version): + """Decorator raising SkipTest if the OS is Linux and the Linux version is + less than `min_version`. + + For example, @requires_linux_version(2, 6, 32) raises SkipTest if the Linux + version is less than 2.6.32. 
+ """ + return _requires_unix_version('Linux', min_version) + +def requires_mac_ver(*min_version): + """Decorator raising SkipTest if the OS is Mac OS X and the OS X + version if less than min_version. + + For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version + is lesser than 10.5. + """ + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kw): + if sys.platform == 'darwin': + version_txt = platform.mac_ver()[0] + try: + version = tuple(map(int, version_txt.split('.'))) + except ValueError: + pass + else: + if version < min_version: + min_version_txt = '.'.join(map(str, min_version)) + raise unittest.SkipTest( + "Mac OS X %s or higher required, not %s" + % (min_version_txt, version_txt)) + return func(*args, **kw) + wrapper.min_version = min_version + return wrapper + return decorator + +# Don't use "localhost", since resolving it uses the DNS under recent +# Windows versions (see issue #18792). +HOST = "127.0.0.1" +HOSTv6 = "::1" + + +def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM): + """Returns an unused port that should be suitable for binding. This is + achieved by creating a temporary socket with the same family and type as + the 'sock' parameter (default is AF_INET, SOCK_STREAM), and binding it to + the specified host address (defaults to 0.0.0.0) with the port set to 0, + eliciting an unused ephemeral port from the OS. The temporary socket is + then closed and deleted, and the ephemeral port is returned. + + Either this method or bind_port() should be used for any tests where a + server socket needs to be bound to a particular port for the duration of + the test. Which one to use depends on whether the calling code is creating + a python socket, or if an unused port needs to be provided in a constructor + or passed to an external program (i.e. the -accept argument to openssl's + s_server mode). Always prefer bind_port() over find_unused_port() where + possible. 
Hard coded ports should *NEVER* be used. As soon as a server + socket is bound to a hard coded port, the ability to run multiple instances + of the test simultaneously on the same host is compromised, which makes the + test a ticking time bomb in a buildbot environment. On Unix buildbots, this + may simply manifest as a failed test, which can be recovered from without + intervention in most cases, but on Windows, the entire python process can + completely and utterly wedge, requiring someone to log in to the buildbot + and manually kill the affected process. + + (This is easy to reproduce on Windows, unfortunately, and can be traced to + the SO_REUSEADDR socket option having different semantics on Windows versus + Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind, + listen and then accept connections on identical host/ports. An EADDRINUSE + socket.error will be raised at some point (depending on the platform and + the order bind and listen were called on each socket). + + However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE + will ever be raised when attempting to bind two identical host/ports. When + accept() is called on each socket, the second caller's process will steal + the port from the first caller, leaving them both in an awkwardly wedged + state where they'll no longer respond to any signals or graceful kills, and + must be forcibly killed via OpenProcess()/TerminateProcess(). + + The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option + instead of SO_REUSEADDR, which effectively affords the same semantics as + SO_REUSEADDR on Unix. Given the propensity of Unix developers in the Open + Source world compared to Windows ones, this is a common mistake. A quick + look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when + openssl.exe is called with the 's_server' option, for example. See + http://bugs.python.org/issue2550 for more info. 
The following site also + has a very thorough description about the implications of both REUSEADDR + and EXCLUSIVEADDRUSE on Windows: + http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx) + + XXX: although this approach is a vast improvement on previous attempts to + elicit unused ports, it rests heavily on the assumption that the ephemeral + port returned to us by the OS won't immediately be dished back out to some + other process when we close and delete our temporary socket but before our + calling code has a chance to bind the returned port. We can deal with this + issue if/when we come across it. + """ + + tempsock = socket.socket(family, socktype) + port = bind_port(tempsock) + tempsock.close() + del tempsock + return port + +def bind_port(sock, host=HOST): + """Bind the socket to a free port and return the port number. Relies on + ephemeral ports in order to ensure we are using an unbound port. This is + important as many tests may be running simultaneously, especially in a + buildbot environment. This method raises an exception if the sock.family + is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR + or SO_REUSEPORT set on it. Tests should *never* set these socket options + for TCP/IP sockets. The only case for setting these options is testing + multicasting via multiple UDP sockets. + + Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e. + on Windows), it will be set on the socket. This will prevent anyone else + from bind()'ing to our host/port for the duration of the test. 
+ """ + + if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM: + if hasattr(socket, 'SO_REUSEADDR'): + if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1: + raise TestFailed("tests should never set the SO_REUSEADDR " \ + "socket option on TCP/IP sockets!") + if hasattr(socket, 'SO_REUSEPORT'): + try: + if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1: + raise TestFailed("tests should never set the SO_REUSEPORT " \ + "socket option on TCP/IP sockets!") + except socket.error: + # Python's socket module was compiled using modern headers + # thus defining SO_REUSEPORT but this process is running + # under an older kernel that does not support SO_REUSEPORT. + pass + if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'): + sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + + sock.bind((host, 0)) + port = sock.getsockname()[1] + return port + +def _is_ipv6_enabled(): + """Check whether IPv6 is enabled on this host.""" + if socket.has_ipv6: + sock = None + try: + sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) + sock.bind(('::1', 0)) + return True + except (socket.error, socket.gaierror): + pass + finally: + if sock: + sock.close() + return False + +IPV6_ENABLED = _is_ipv6_enabled() + + +# A constant likely larger than the underlying OS pipe buffer size, to +# make writes blocking. +# Windows limit seems to be around 512 B, and many Unix kernels have a +# 64 KiB pipe buffer size or 16 * PAGE_SIZE: take a few megs to be sure. +# (see issue #17835 for a discussion of this number). +PIPE_MAX_SIZE = 4 * 1024 * 1024 + 1 + +# A constant likely larger than the underlying OS socket buffer size, to make +# writes blocking. +# The socket buffer sizes can usually be tuned system-wide (e.g. through sysctl +# on Linux), or on a per-socket basis (SO_SNDBUF/SO_RCVBUF). See issue #18643 +# for a discussion of this number). 
+SOCK_MAX_SIZE = 16 * 1024 * 1024 + 1 + +# # decorator for skipping tests on non-IEEE 754 platforms +# requires_IEEE_754 = unittest.skipUnless( +# float.__getformat__("double").startswith("IEEE"), +# "test requires IEEE 754 doubles") + +requires_zlib = unittest.skipUnless(zlib, 'requires zlib') + +requires_bz2 = unittest.skipUnless(bz2, 'requires bz2') + +requires_lzma = unittest.skipUnless(lzma, 'requires lzma') + +is_jython = sys.platform.startswith('java') + +# Filename used for testing +if os.name == 'java': + # Jython disallows @ in module names + TESTFN = '$test' +else: + TESTFN = '@test' + +# Disambiguate TESTFN for parallel testing, while letting it remain a valid +# module name. +TESTFN = "{0}_{1}_tmp".format(TESTFN, os.getpid()) + +# # FS_NONASCII: non-ASCII character encodable by os.fsencode(), +# # or None if there is no such character. +# FS_NONASCII = None +# for character in ( +# # First try printable and common characters to have a readable filename. +# # For each character, the encoding list are just example of encodings able +# # to encode the character (the list is not exhaustive). +# +# # U+00E6 (Latin Small Letter Ae): cp1252, iso-8859-1 +# '\u00E6', +# # U+0130 (Latin Capital Letter I With Dot Above): cp1254, iso8859_3 +# '\u0130', +# # U+0141 (Latin Capital Letter L With Stroke): cp1250, cp1257 +# '\u0141', +# # U+03C6 (Greek Small Letter Phi): cp1253 +# '\u03C6', +# # U+041A (Cyrillic Capital Letter Ka): cp1251 +# '\u041A', +# # U+05D0 (Hebrew Letter Alef): Encodable to cp424 +# '\u05D0', +# # U+060C (Arabic Comma): cp864, cp1006, iso8859_6, mac_arabic +# '\u060C', +# # U+062A (Arabic Letter Teh): cp720 +# '\u062A', +# # U+0E01 (Thai Character Ko Kai): cp874 +# '\u0E01', +# +# # Then try more "special" characters. "special" because they may be +# # interpreted or displayed differently depending on the exact locale +# # encoding and the font. 
+# +# # U+00A0 (No-Break Space) +# '\u00A0', +# # U+20AC (Euro Sign) +# '\u20AC', +# ): +# try: +# os.fsdecode(os.fsencode(character)) +# except UnicodeError: +# pass +# else: +# FS_NONASCII = character +# break +# +# # TESTFN_UNICODE is a non-ascii filename +# TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f" +# if sys.platform == 'darwin': +# # In Mac OS X's VFS API file names are, by definition, canonically +# # decomposed Unicode, encoded using UTF-8. See QA1173: +# # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html +# import unicodedata +# TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE) +# TESTFN_ENCODING = sys.getfilesystemencoding() +# +# # TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be +# # encoded by the filesystem encoding (in strict mode). It can be None if we +# # cannot generate such filename. +# TESTFN_UNENCODABLE = None +# if os.name in ('nt', 'ce'): +# # skip win32s (0) or Windows 9x/ME (1) +# if sys.getwindowsversion().platform >= 2: +# # Different kinds of characters from various languages to minimize the +# # probability that the whole name is encodable to MBCS (issue #9819) +# TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80" +# try: +# TESTFN_UNENCODABLE.encode(TESTFN_ENCODING) +# except UnicodeEncodeError: +# pass +# else: +# print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). ' +# 'Unicode filename tests may not be effective' +# % (TESTFN_UNENCODABLE, TESTFN_ENCODING)) +# TESTFN_UNENCODABLE = None +# # Mac OS X denies unencodable filenames (invalid utf-8) +# elif sys.platform != 'darwin': +# try: +# # ascii and utf-8 cannot encode the byte 0xff +# b'\xff'.decode(TESTFN_ENCODING) +# except UnicodeDecodeError: +# # 0xff will be encoded using the surrogate character u+DCFF +# TESTFN_UNENCODABLE = TESTFN \ +# + b'-\xff'.decode(TESTFN_ENCODING, 'surrogateescape') +# else: +# # File system encoding (eg. 
ISO-8859-* encodings) can encode +# # the byte 0xff. Skip some unicode filename tests. +# pass +# +# # TESTFN_UNDECODABLE is a filename (bytes type) that should *not* be able to be +# # decoded from the filesystem encoding (in strict mode). It can be None if we +# # cannot generate such filename (ex: the latin1 encoding can decode any byte +# # sequence). On UNIX, TESTFN_UNDECODABLE can be decoded by os.fsdecode() thanks +# # to the surrogateescape error handler (PEP 383), but not from the filesystem +# # encoding in strict mode. +# TESTFN_UNDECODABLE = None +# for name in ( +# # b'\xff' is not decodable by os.fsdecode() with code page 932. Windows +# # accepts it to create a file or a directory, or don't accept to enter to +# # such directory (when the bytes name is used). So test b'\xe7' first: it is +# # not decodable from cp932. +# b'\xe7w\xf0', +# # undecodable from ASCII, UTF-8 +# b'\xff', +# # undecodable from iso8859-3, iso8859-6, iso8859-7, cp424, iso8859-8, cp856 +# # and cp857 +# b'\xae\xd5' +# # undecodable from UTF-8 (UNIX and Mac OS X) +# b'\xed\xb2\x80', b'\xed\xb4\x80', +# # undecodable from shift_jis, cp869, cp874, cp932, cp1250, cp1251, cp1252, +# # cp1253, cp1254, cp1255, cp1257, cp1258 +# b'\x81\x98', +# ): +# try: +# name.decode(TESTFN_ENCODING) +# except UnicodeDecodeError: +# TESTFN_UNDECODABLE = os.fsencode(TESTFN) + name +# break +# +# if FS_NONASCII: +# TESTFN_NONASCII = TESTFN + '-' + FS_NONASCII +# else: +# TESTFN_NONASCII = None + +# Save the initial cwd +SAVEDCWD = os.getcwd() + +@contextlib.contextmanager +def temp_cwd(name='tempcwd', quiet=False, path=None): + """ + Context manager that temporarily changes the CWD. + + An existing path may be provided as *path*, in which case this + function makes no changes to the file system. + + Otherwise, the new CWD is created in the current directory and it's + named *name*. If *quiet* is False (default) and it's not possible to + create or change the CWD, an error is raised. 
If it's True, only a + warning is raised and the original CWD is used. + """ + saved_dir = os.getcwd() + is_temporary = False + if path is None: + path = name + try: + os.mkdir(name) + is_temporary = True + except OSError: + if not quiet: + raise + warnings.warn('tests may fail, unable to create temp CWD ' + name, + RuntimeWarning, stacklevel=3) + try: + os.chdir(path) + except OSError: + if not quiet: + raise + warnings.warn('tests may fail, unable to change the CWD to ' + path, + RuntimeWarning, stacklevel=3) + try: + yield os.getcwd() + finally: + os.chdir(saved_dir) + if is_temporary: + rmtree(name) + + +if hasattr(os, "umask"): + @contextlib.contextmanager + def temp_umask(umask): + """Context manager that temporarily sets the process umask.""" + oldmask = os.umask(umask) + try: + yield + finally: + os.umask(oldmask) + + +def findfile(file, here=__file__, subdir=None): + """Try to find a file on sys.path and the working directory. If it is not + found the argument passed to the function is returned (this does not + necessarily signal failure; could still be the legitimate path).""" + if os.path.isabs(file): + return file + if subdir is not None: + file = os.path.join(subdir, file) + path = sys.path + path = [os.path.dirname(here)] + path + for dn in path: + fn = os.path.join(dn, file) + if os.path.exists(fn): return fn + return file + +def create_empty_file(filename): + """Create an empty file. If the file already exists, truncate it.""" + fd = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC) + os.close(fd) + +def sortdict(dict): + "Like repr(dict), but in sorted order." + items = sorted(dict.items()) + reprpairs = ["%r: %r" % pair for pair in items] + withcommas = ", ".join(reprpairs) + return "{%s}" % withcommas + +def make_bad_fd(): + """ + Create an invalid file descriptor by opening and closing a file and return + its fd. 
+ """ + file = open(TESTFN, "wb") + try: + return file.fileno() + finally: + file.close() + unlink(TESTFN) + +def check_syntax_error(testcase, statement): + testcase.assertRaises(SyntaxError, compile, statement, + '', 'exec') + +def open_urlresource(url, *args, **kw): + from future.backports.urllib import (request as urllib_request, + parse as urllib_parse) + + check = kw.pop('check', None) + + filename = urllib_parse.urlparse(url)[2].split('/')[-1] # '/': it's URL! + + fn = os.path.join(os.path.dirname(__file__), "data", filename) + + def check_valid_file(fn): + f = open(fn, *args, **kw) + if check is None: + return f + elif check(f): + f.seek(0) + return f + f.close() + + if os.path.exists(fn): + f = check_valid_file(fn) + if f is not None: + return f + unlink(fn) + + # Verify the requirement before downloading the file + requires('urlfetch') + + print('\tfetching %s ...' % url, file=get_original_stdout()) + f = urllib_request.urlopen(url, timeout=15) + try: + with open(fn, "wb") as out: + s = f.read() + while s: + out.write(s) + s = f.read() + finally: + f.close() + + f = check_valid_file(fn) + if f is not None: + return f + raise TestFailed('invalid resource %r' % fn) + + +class WarningsRecorder(object): + """Convenience wrapper for the warnings list returned on + entry to the warnings.catch_warnings() context manager. + """ + def __init__(self, warnings_list): + self._warnings = warnings_list + self._last = 0 + + def __getattr__(self, attr): + if len(self._warnings) > self._last: + return getattr(self._warnings[-1], attr) + elif attr in warnings.WarningMessage._WARNING_DETAILS: + return None + raise AttributeError("%r has no attribute %r" % (self, attr)) + + @property + def warnings(self): + return self._warnings[self._last:] + + def reset(self): + self._last = len(self._warnings) + + +def _filterwarnings(filters, quiet=False): + """Catch the warnings, then check if all the expected + warnings have been raised and re-raise unexpected warnings. 
+ If 'quiet' is True, only re-raise the unexpected warnings. + """ + # Clear the warning registry of the calling module + # in order to re-raise the warnings. + frame = sys._getframe(2) + registry = frame.f_globals.get('__warningregistry__') + if registry: + if utils.PY3: + registry.clear() + else: + # Py2-compatible: + for i in range(len(registry)): + registry.pop() + with warnings.catch_warnings(record=True) as w: + # Set filter "always" to record all warnings. Because + # test_warnings swap the module, we need to look up in + # the sys.modules dictionary. + sys.modules['warnings'].simplefilter("always") + yield WarningsRecorder(w) + # Filter the recorded warnings + reraise = list(w) + missing = [] + for msg, cat in filters: + seen = False + for w in reraise[:]: + warning = w.message + # Filter out the matching messages + if (re.match(msg, str(warning), re.I) and + issubclass(warning.__class__, cat)): + seen = True + reraise.remove(w) + if not seen and not quiet: + # This filter caught nothing + missing.append((msg, cat.__name__)) + if reraise: + raise AssertionError("unhandled warning %s" % reraise[0]) + if missing: + raise AssertionError("filter (%r, %s) did not catch any warning" % + missing[0]) + + +@contextlib.contextmanager +def check_warnings(*filters, **kwargs): + """Context manager to silence warnings. + + Accept 2-tuples as positional arguments: + ("message regexp", WarningCategory) + + Optional argument: + - if 'quiet' is True, it does not fail if a filter catches nothing + (default True without argument, + default False if some filters are defined) + + Without argument, it defaults to: + check_warnings(("", Warning), quiet=True) + """ + quiet = kwargs.get('quiet') + if not filters: + filters = (("", Warning),) + # Preserve backward compatibility + if quiet is None: + quiet = True + return _filterwarnings(filters, quiet) + + +class CleanImport(object): + """Context manager to force import to return a new module reference. 
+ + This is useful for testing module-level behaviours, such as + the emission of a DeprecationWarning on import. + + Use like this: + + with CleanImport("foo"): + importlib.import_module("foo") # new reference + """ + + def __init__(self, *module_names): + self.original_modules = sys.modules.copy() + for module_name in module_names: + if module_name in sys.modules: + module = sys.modules[module_name] + # It is possible that module_name is just an alias for + # another module (e.g. stub for modules renamed in 3.x). + # In that case, we also need delete the real module to clear + # the import cache. + if module.__name__ != module_name: + del sys.modules[module.__name__] + del sys.modules[module_name] + + def __enter__(self): + return self + + def __exit__(self, *ignore_exc): + sys.modules.update(self.original_modules) + +### Added for python-future: +if utils.PY3: + import collections.abc + mybase = collections.abc.MutableMapping +else: + import UserDict + mybase = UserDict.DictMixin +### + +class EnvironmentVarGuard(mybase): + + """Class to help protect the environment variable properly. 
Can be used as + a context manager.""" + + def __init__(self): + self._environ = os.environ + self._changed = {} + + def __getitem__(self, envvar): + return self._environ[envvar] + + def __setitem__(self, envvar, value): + # Remember the initial value on the first access + if envvar not in self._changed: + self._changed[envvar] = self._environ.get(envvar) + self._environ[envvar] = value + + def __delitem__(self, envvar): + # Remember the initial value on the first access + if envvar not in self._changed: + self._changed[envvar] = self._environ.get(envvar) + if envvar in self._environ: + del self._environ[envvar] + + def keys(self): + return self._environ.keys() + + def __iter__(self): + return iter(self._environ) + + def __len__(self): + return len(self._environ) + + def set(self, envvar, value): + self[envvar] = value + + def unset(self, envvar): + del self[envvar] + + def __enter__(self): + return self + + def __exit__(self, *ignore_exc): + for (k, v) in self._changed.items(): + if v is None: + if k in self._environ: + del self._environ[k] + else: + self._environ[k] = v + os.environ = self._environ + + +class DirsOnSysPath(object): + """Context manager to temporarily add directories to sys.path. + + This makes a copy of sys.path, appends any directories given + as positional arguments, then reverts sys.path to the copied + settings when the context ends. + + Note that *all* sys.path modifications in the body of the + context manager, including replacement of the object, + will be reverted at the end of the block. 
+ """ + + def __init__(self, *paths): + self.original_value = sys.path[:] + self.original_object = sys.path + sys.path.extend(paths) + + def __enter__(self): + return self + + def __exit__(self, *ignore_exc): + sys.path = self.original_object + sys.path[:] = self.original_value + + +class TransientResource(object): + + """Raise ResourceDenied if an exception is raised while the context manager + is in effect that matches the specified exception and attributes.""" + + def __init__(self, exc, **kwargs): + self.exc = exc + self.attrs = kwargs + + def __enter__(self): + return self + + def __exit__(self, type_=None, value=None, traceback=None): + """If type_ is a subclass of self.exc and value has attributes matching + self.attrs, raise ResourceDenied. Otherwise let the exception + propagate (if any).""" + if type_ is not None and issubclass(self.exc, type_): + for attr, attr_value in self.attrs.items(): + if not hasattr(value, attr): + break + if getattr(value, attr) != attr_value: + break + else: + raise ResourceDenied("an optional resource is not available") + +# Context managers that raise ResourceDenied when various issues +# with the Internet connection manifest themselves as exceptions. 
+# XXX deprecate these and use transient_internet() instead +time_out = TransientResource(IOError, errno=errno.ETIMEDOUT) +socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET) +ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET) + + +@contextlib.contextmanager +def transient_internet(resource_name, timeout=30.0, errnos=()): + """Return a context manager that raises ResourceDenied when various issues + with the Internet connection manifest themselves as exceptions.""" + default_errnos = [ + ('ECONNREFUSED', 111), + ('ECONNRESET', 104), + ('EHOSTUNREACH', 113), + ('ENETUNREACH', 101), + ('ETIMEDOUT', 110), + ] + default_gai_errnos = [ + ('EAI_AGAIN', -3), + ('EAI_FAIL', -4), + ('EAI_NONAME', -2), + ('EAI_NODATA', -5), + # Encountered when trying to resolve IPv6-only hostnames + ('WSANO_DATA', 11004), + ] + + denied = ResourceDenied("Resource %r is not available" % resource_name) + captured_errnos = errnos + gai_errnos = [] + if not captured_errnos: + captured_errnos = [getattr(errno, name, num) + for (name, num) in default_errnos] + gai_errnos = [getattr(socket, name, num) + for (name, num) in default_gai_errnos] + + def filter_error(err): + n = getattr(err, 'errno', None) + if (isinstance(err, socket.timeout) or + (isinstance(err, socket.gaierror) and n in gai_errnos) or + n in captured_errnos): + if not verbose: + sys.stderr.write(denied.args[0] + "\n") + # Was: raise denied from err + # For Python-Future: + exc = denied + exc.__cause__ = err + raise exc + + old_timeout = socket.getdefaulttimeout() + try: + if timeout is not None: + socket.setdefaulttimeout(timeout) + yield + except IOError as err: + # urllib can wrap original socket errors multiple times (!), we must + # unwrap to get at the original error. 
+ while True: + a = err.args + if len(a) >= 1 and isinstance(a[0], IOError): + err = a[0] + # The error can also be wrapped as args[1]: + # except socket.error as msg: + # raise IOError('socket error', msg).with_traceback(sys.exc_info()[2]) + elif len(a) >= 2 and isinstance(a[1], IOError): + err = a[1] + else: + break + filter_error(err) + raise + # XXX should we catch generic exceptions and look for their + # __cause__ or __context__? + finally: + socket.setdefaulttimeout(old_timeout) + + +@contextlib.contextmanager +def captured_output(stream_name): + """Return a context manager used by captured_stdout/stdin/stderr + that temporarily replaces the sys stream *stream_name* with a StringIO.""" + import io + orig_stdout = getattr(sys, stream_name) + setattr(sys, stream_name, io.StringIO()) + try: + yield getattr(sys, stream_name) + finally: + setattr(sys, stream_name, orig_stdout) + +def captured_stdout(): + """Capture the output of sys.stdout: + + with captured_stdout() as s: + print("hello") + self.assertEqual(s.getvalue(), "hello") + """ + return captured_output("stdout") + +def captured_stderr(): + return captured_output("stderr") + +def captured_stdin(): + return captured_output("stdin") + + +def gc_collect(): + """Force as many objects as possible to be collected. + + In non-CPython implementations of Python, this is needed because timely + deallocation is not guaranteed by the garbage collector. (Even in CPython + this can be the case in case of reference cycles.) This means that __del__ + methods may be called later than expected and weakrefs may remain alive for + longer than expected. This function tries its best to force all garbage + objects to disappear. 
+ """ + gc.collect() + if is_jython: + time.sleep(0.1) + gc.collect() + gc.collect() + +@contextlib.contextmanager +def disable_gc(): + have_gc = gc.isenabled() + gc.disable() + try: + yield + finally: + if have_gc: + gc.enable() + + +def python_is_optimized(): + """Find if Python was built with optimizations.""" + # We don't have sysconfig on Py2.6: + import sysconfig + cflags = sysconfig.get_config_var('PY_CFLAGS') or '' + final_opt = "" + for opt in cflags.split(): + if opt.startswith('-O'): + final_opt = opt + return final_opt != '' and final_opt != '-O0' + + +_header = 'nP' +_align = '0n' +if hasattr(sys, "gettotalrefcount"): + _header = '2P' + _header + _align = '0P' +_vheader = _header + 'n' + +def calcobjsize(fmt): + return struct.calcsize(_header + fmt + _align) + +def calcvobjsize(fmt): + return struct.calcsize(_vheader + fmt + _align) + + +_TPFLAGS_HAVE_GC = 1<<14 +_TPFLAGS_HEAPTYPE = 1<<9 + +def check_sizeof(test, o, size): + result = sys.getsizeof(o) + # add GC header size + if ((type(o) == type) and (o.__flags__ & _TPFLAGS_HEAPTYPE) or\ + ((type(o) != type) and (type(o).__flags__ & _TPFLAGS_HAVE_GC))): + size += _testcapi.SIZEOF_PYGC_HEAD + msg = 'wrong size for %s: got %d, expected %d' \ + % (type(o), result, size) + test.assertEqual(result, size, msg) + +#======================================================================= +# Decorator for running a function in a different locale, correctly resetting +# it afterwards. 
+ +def run_with_locale(catstr, *locales): + def decorator(func): + def inner(*args, **kwds): + try: + import locale + category = getattr(locale, catstr) + orig_locale = locale.setlocale(category) + except AttributeError: + # if the test author gives us an invalid category string + raise + except: + # cannot retrieve original locale, so do nothing + locale = orig_locale = None + else: + for loc in locales: + try: + locale.setlocale(category, loc) + break + except: + pass + + # now run the function, resetting the locale on exceptions + try: + return func(*args, **kwds) + finally: + if locale and orig_locale: + locale.setlocale(category, orig_locale) + inner.__name__ = func.__name__ + inner.__doc__ = func.__doc__ + return inner + return decorator + +#======================================================================= +# Decorator for running a function in a specific timezone, correctly +# resetting it afterwards. + +def run_with_tz(tz): + def decorator(func): + def inner(*args, **kwds): + try: + tzset = time.tzset + except AttributeError: + raise unittest.SkipTest("tzset required") + if 'TZ' in os.environ: + orig_tz = os.environ['TZ'] + else: + orig_tz = None + os.environ['TZ'] = tz + tzset() + + # now run the function, resetting the tz on exceptions + try: + return func(*args, **kwds) + finally: + if orig_tz is None: + del os.environ['TZ'] + else: + os.environ['TZ'] = orig_tz + time.tzset() + + inner.__name__ = func.__name__ + inner.__doc__ = func.__doc__ + return inner + return decorator + +#======================================================================= +# Big-memory-test support. Separate from 'resources' because memory use +# should be configurable. + +# Some handy shorthands. 
Note that these are used for byte-limits as well +# as size-limits, in the various bigmem tests +_1M = 1024*1024 +_1G = 1024 * _1M +_2G = 2 * _1G +_4G = 4 * _1G + +MAX_Py_ssize_t = sys.maxsize + +def set_memlimit(limit): + global max_memuse + global real_max_memuse + sizes = { + 'k': 1024, + 'm': _1M, + 'g': _1G, + 't': 1024*_1G, + } + m = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit, + re.IGNORECASE | re.VERBOSE) + if m is None: + raise ValueError('Invalid memory limit %r' % (limit,)) + memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()]) + real_max_memuse = memlimit + if memlimit > MAX_Py_ssize_t: + memlimit = MAX_Py_ssize_t + if memlimit < _2G - 1: + raise ValueError('Memory limit %r too low to be useful' % (limit,)) + max_memuse = memlimit + +class _MemoryWatchdog(object): + """An object which periodically watches the process' memory consumption + and prints it out. + """ + + def __init__(self): + self.procfile = '/proc/{pid}/statm'.format(pid=os.getpid()) + self.started = False + + def start(self): + try: + f = open(self.procfile, 'r') + except OSError as e: + warnings.warn('/proc not available for stats: {0}'.format(e), + RuntimeWarning) + sys.stderr.flush() + return + + watchdog_script = findfile("memory_watchdog.py") + self.mem_watchdog = subprocess.Popen([sys.executable, watchdog_script], + stdin=f, stderr=subprocess.DEVNULL) + f.close() + self.started = True + + def stop(self): + if self.started: + self.mem_watchdog.terminate() + self.mem_watchdog.wait() + + +def bigmemtest(size, memuse, dry_run=True): + """Decorator for bigmem tests. + + 'minsize' is the minimum useful size for the test (in arbitrary, + test-interpreted units.) 'memuse' is the number of 'bytes per size' for + the test, or a good estimate of it. + + if 'dry_run' is False, it means the test doesn't support dummy runs + when -M is not specified. 
+ """ + def decorator(f): + def wrapper(self): + size = wrapper.size + memuse = wrapper.memuse + if not real_max_memuse: + maxsize = 5147 + else: + maxsize = size + + if ((real_max_memuse or not dry_run) + and real_max_memuse < maxsize * memuse): + raise unittest.SkipTest( + "not enough memory: %.1fG minimum needed" + % (size * memuse / (1024 ** 3))) + + if real_max_memuse and verbose: + print() + print(" ... expected peak memory use: {peak:.1f}G" + .format(peak=size * memuse / (1024 ** 3))) + watchdog = _MemoryWatchdog() + watchdog.start() + else: + watchdog = None + + try: + return f(self, maxsize) + finally: + if watchdog: + watchdog.stop() + + wrapper.size = size + wrapper.memuse = memuse + return wrapper + return decorator + +def bigaddrspacetest(f): + """Decorator for tests that fill the address space.""" + def wrapper(self): + if max_memuse < MAX_Py_ssize_t: + if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31: + raise unittest.SkipTest( + "not enough memory: try a 32-bit build instead") + else: + raise unittest.SkipTest( + "not enough memory: %.1fG minimum needed" + % (MAX_Py_ssize_t / (1024 ** 3))) + else: + return f(self) + return wrapper + +#======================================================================= +# unittest integration. + +class BasicTestRunner(object): + def run(self, test): + result = unittest.TestResult() + test(result) + return result + +def _id(obj): + return obj + +def requires_resource(resource): + if resource == 'gui' and not _is_gui_available(): + return unittest.skip("resource 'gui' is not available") + if is_resource_enabled(resource): + return _id + else: + return unittest.skip("resource {0!r} is not enabled".format(resource)) + +def cpython_only(test): + """ + Decorator for tests only applicable on CPython. 
+ """ + return impl_detail(cpython=True)(test) + +def impl_detail(msg=None, **guards): + if check_impl_detail(**guards): + return _id + if msg is None: + guardnames, default = _parse_guards(guards) + if default: + msg = "implementation detail not available on {0}" + else: + msg = "implementation detail specific to {0}" + guardnames = sorted(guardnames.keys()) + msg = msg.format(' or '.join(guardnames)) + return unittest.skip(msg) + +def _parse_guards(guards): + # Returns a tuple ({platform_name: run_me}, default_value) + if not guards: + return ({'cpython': True}, False) + is_true = list(guards.values())[0] + assert list(guards.values()) == [is_true] * len(guards) # all True or all False + return (guards, not is_true) + +# Use the following check to guard CPython's implementation-specific tests -- +# or to run them only on the implementation(s) guarded by the arguments. +def check_impl_detail(**guards): + """This function returns True or False depending on the host platform. + Examples: + if check_impl_detail(): # only on CPython (default) + if check_impl_detail(jython=True): # only on Jython + if check_impl_detail(cpython=False): # everywhere except on CPython + """ + guards, default = _parse_guards(guards) + return guards.get(platform.python_implementation().lower(), default) + + +def no_tracing(func): + """Decorator to temporarily turn off tracing for the duration of a test.""" + if not hasattr(sys, 'gettrace'): + return func + else: + @functools.wraps(func) + def wrapper(*args, **kwargs): + original_trace = sys.gettrace() + try: + sys.settrace(None) + return func(*args, **kwargs) + finally: + sys.settrace(original_trace) + return wrapper + + +def refcount_test(test): + """Decorator for tests which involve reference counting. + + To start, the decorator does not run the test if is not run by CPython. + After that, any trace function is unset during the test to prevent + unexpected refcounts caused by the trace function. 
+ + """ + return no_tracing(cpython_only(test)) + + +def _filter_suite(suite, pred): + """Recursively filter test cases in a suite based on a predicate.""" + newtests = [] + for test in suite._tests: + if isinstance(test, unittest.TestSuite): + _filter_suite(test, pred) + newtests.append(test) + else: + if pred(test): + newtests.append(test) + suite._tests = newtests + +def _run_suite(suite): + """Run tests from a unittest.TestSuite-derived class.""" + if verbose: + runner = unittest.TextTestRunner(sys.stdout, verbosity=2, + failfast=failfast) + else: + runner = BasicTestRunner() + + result = runner.run(suite) + if not result.wasSuccessful(): + if len(result.errors) == 1 and not result.failures: + err = result.errors[0][1] + elif len(result.failures) == 1 and not result.errors: + err = result.failures[0][1] + else: + err = "multiple errors occurred" + if not verbose: err += "; run in verbose mode for details" + raise TestFailed(err) + + +def run_unittest(*classes): + """Run tests from unittest.TestCase-derived classes.""" + valid_types = (unittest.TestSuite, unittest.TestCase) + suite = unittest.TestSuite() + for cls in classes: + if isinstance(cls, str): + if cls in sys.modules: + suite.addTest(unittest.findTestCases(sys.modules[cls])) + else: + raise ValueError("str arguments must be keys in sys.modules") + elif isinstance(cls, valid_types): + suite.addTest(cls) + else: + suite.addTest(unittest.makeSuite(cls)) + def case_pred(test): + if match_tests is None: + return True + for name in test.id().split("."): + if fnmatch.fnmatchcase(name, match_tests): + return True + return False + _filter_suite(suite, case_pred) + _run_suite(suite) + +# We don't have sysconfig on Py2.6: +# #======================================================================= +# # Check for the presence of docstrings. 
# (Tail of the commented-out docstring check above; sysconfig is missing on
# Py2.6, so this stays disabled.)
# HAVE_DOCSTRINGS = (check_impl_detail(cpython=False) or
#                    sys.platform == 'win32' or
#                    sysconfig.get_config_var('WITH_DOC_STRINGS'))
#
# requires_docstrings = unittest.skipUnless(HAVE_DOCSTRINGS,
#                                           "test requires docstrings")
#
#
# #=======================================================================
# doctest driver.

def run_doctest(module, verbosity=None, optionflags=0):
    """Run doctest on the given module.  Return (#failures, #tests).

    If optional argument verbosity is not specified (or is None), pass
    support's belief about verbosity on to doctest.  Else doctest's
    usual behavior is used (it searches sys.argv for -v).
    """

    import doctest

    if verbosity is None:
        verbosity = verbose
    else:
        verbosity = None

    f, t = doctest.testmod(module, verbose=verbosity, optionflags=optionflags)
    if f:
        raise TestFailed("%d of %d doctests failed" % (f, t))
    if verbose:
        print('doctest (%s) ... %d tests with zero failures' %
              (module.__name__, t))
    return f, t


#=======================================================================
# Support for saving and restoring the imported modules.

def modules_setup():
    """Snapshot sys.modules; pass the result to modules_cleanup()."""
    return sys.modules.copy(),

def modules_cleanup(oldmodules):
    """Restore sys.modules from the snapshot made by modules_setup()."""
    # Encoders/decoders are registered permanently within the internal
    # codec cache. If we destroy the corresponding modules their
    # globals will be set to None which will trip up the cached functions.
    encodings = [(k, v) for k, v in sys.modules.items()
                 if k.startswith('encodings.')]
    # Was:
    #     sys.modules.clear()
    # Py2-compatible drain of the dict.  BUG FIX: this used to call
    # ``sys.modules.pop()``, but dict.pop() requires a key argument and
    # raises TypeError without one; popitem() is the argument-less way
    # to remove entries one at a time.
    for _ in range(len(sys.modules)):
        sys.modules.popitem()

    sys.modules.update(encodings)
    # XXX: This kind of problem can affect more than just encodings. In particular
    # extension modules (such as _ssl) don't cope with reloading properly.
    # Really, test modules should be cleaning out the test specific modules they
    # know they added (ala test_runpy) rather than relying on this function (as
    # test_importhooks and test_pkg do currently).
    # Implicitly imported *real* modules should be left alone (see issue 10556).
    sys.modules.update(oldmodules)

#=======================================================================
# Backported versions of threading_setup() and threading_cleanup() which don't refer
# to threading._dangling (not available on Py2.7).

# Threading support to prevent reporting refleaks when running regrtest.py -R

# NOTE: we use thread._count() rather than threading.enumerate() (or the
# moral equivalent thereof) because a threading.Thread object is still alive
# until its __bootstrap() method has returned, even after it has been
# unregistered from the threading module.
# thread._count(), on the other hand, only gets decremented *after* the
# __bootstrap() method has returned, which gives us reliable reference counts
# at the end of a test run.

def threading_setup():
    """Return a 1-tuple of the live thread count (1 if _thread is absent)."""
    if _thread:
        return _thread._count(),
    else:
        return 1,

def threading_cleanup(nb_threads):
    """Wait (up to ~1s) for the thread count to drop back to nb_threads."""
    if not _thread:
        return

    _MAX_COUNT = 10
    for count in range(_MAX_COUNT):
        n = _thread._count()
        if n == nb_threads:
            break
        time.sleep(0.1)
    # XXX print a warning in case of failure?

def reap_threads(func):
    """Use this function when threads are being used.  This will
    ensure that the threads are cleaned up even when the test fails.
    If threading is unavailable this function does nothing.
    """
    if not _thread:
        return func

    @functools.wraps(func)
    def decorator(*args):
        key = threading_setup()
        try:
            return func(*args)
        finally:
            threading_cleanup(*key)
    return decorator

def reap_children():
    """Use this function at the end of test_main() whenever sub-processes
    are started.  This will help ensure that no extra children (zombies)
    stick around to hog resources and create problems when looking
    for refleaks.
    """

    # Reap all our dead child processes so we don't leave zombies around.
    # These hog resources and might be causing some of the buildbots to die.
    if hasattr(os, 'waitpid'):
        any_process = -1
        while True:
            try:
                # This will raise an exception on Windows.  That's ok.
                pid, status = os.waitpid(any_process, os.WNOHANG)
                if pid == 0:
                    break
            except:
                # Deliberate best-effort reaping: any error (no children,
                # Windows) simply ends the loop.
                break

@contextlib.contextmanager
def swap_attr(obj, attr, new_val):
    """Temporary swap out an attribute with a new object.

    Usage:
        with swap_attr(obj, "attr", 5):
            ...

        This will set obj.attr to 5 for the duration of the with: block,
        restoring the old value at the end of the block. If `attr` doesn't
        exist on `obj`, it will be created and then deleted at the end of the
        block.
    """
    if hasattr(obj, attr):
        real_val = getattr(obj, attr)
        setattr(obj, attr, new_val)
        try:
            yield
        finally:
            setattr(obj, attr, real_val)
    else:
        setattr(obj, attr, new_val)
        try:
            yield
        finally:
            delattr(obj, attr)

@contextlib.contextmanager
def swap_item(obj, item, new_val):
    """Temporary swap out an item with a new object.

    Usage:
        with swap_item(obj, "item", 5):
            ...

        This will set obj["item"] to 5 for the duration of the with: block,
        restoring the old value at the end of the block. If `item` doesn't
        exist on `obj`, it will be created and then deleted at the end of the
        block.
    """
    if item in obj:
        real_val = obj[item]
        obj[item] = new_val
        try:
            yield
        finally:
            obj[item] = real_val
    else:
        obj[item] = new_val
        try:
            yield
        finally:
            del obj[item]
+ """ + stderr = re.sub(br"\[\d+ refs\]\r?\n?", b"", stderr).strip() + return stderr + +def args_from_interpreter_flags(): + """Return a list of command-line arguments reproducing the current + settings in sys.flags and sys.warnoptions.""" + return subprocess._args_from_interpreter_flags() + +#============================================================ +# Support for assertions about logging. +#============================================================ + +class TestHandler(logging.handlers.BufferingHandler): + def __init__(self, matcher): + # BufferingHandler takes a "capacity" argument + # so as to know when to flush. As we're overriding + # shouldFlush anyway, we can set a capacity of zero. + # You can call flush() manually to clear out the + # buffer. + logging.handlers.BufferingHandler.__init__(self, 0) + self.matcher = matcher + + def shouldFlush(self): + return False + + def emit(self, record): + self.format(record) + self.buffer.append(record.__dict__) + + def matches(self, **kwargs): + """ + Look for a saved dict whose keys/values match the supplied arguments. + """ + result = False + for d in self.buffer: + if self.matcher.matches(d, **kwargs): + result = True + break + return result + +class Matcher(object): + + _partial_matches = ('msg', 'message') + + def matches(self, d, **kwargs): + """ + Try to match a single dict with the supplied arguments. + + Keys whose values are strings and which are in self._partial_matches + will be checked for partial (i.e. substring) matches. You can extend + this scheme to (for example) do regular expression matching, etc. + """ + result = True + for k in kwargs: + v = kwargs[k] + dv = d.get(k) + if not self.match_value(k, dv, v): + result = False + break + return result + + def match_value(self, k, dv, v): + """ + Try to match a single stored value (dv) with a supplied value (v). 
+ """ + if type(v) != type(dv): + result = False + elif type(dv) is not str or k not in self._partial_matches: + result = (v == dv) + else: + result = dv.find(v) >= 0 + return result + + +_can_symlink = None +def can_symlink(): + global _can_symlink + if _can_symlink is not None: + return _can_symlink + symlink_path = TESTFN + "can_symlink" + try: + os.symlink(TESTFN, symlink_path) + can = True + except (OSError, NotImplementedError, AttributeError): + can = False + else: + os.remove(symlink_path) + _can_symlink = can + return can + +def skip_unless_symlink(test): + """Skip decorator for tests that require functional symlink""" + ok = can_symlink() + msg = "Requires functional symlink implementation" + return test if ok else unittest.skip(msg)(test) + +_can_xattr = None +def can_xattr(): + global _can_xattr + if _can_xattr is not None: + return _can_xattr + if not hasattr(os, "setxattr"): + can = False + else: + tmp_fp, tmp_name = tempfile.mkstemp() + try: + with open(TESTFN, "wb") as fp: + try: + # TESTFN & tempfile may use different file systems with + # different capabilities + os.setxattr(tmp_fp, b"user.test", b"") + os.setxattr(fp.fileno(), b"user.test", b"") + # Kernels < 2.6.39 don't respect setxattr flags. 
_can_xattr = None  # cached result of can_xattr()
def can_xattr():
    """Return True if the platform supports extended attributes (cached)."""
    global _can_xattr
    if _can_xattr is not None:
        return _can_xattr
    if not hasattr(os, "setxattr"):
        can = False
    else:
        tmp_fp, tmp_name = tempfile.mkstemp()
        try:
            with open(TESTFN, "wb") as fp:
                try:
                    # TESTFN & tempfile may use different file systems with
                    # different capabilities
                    os.setxattr(tmp_fp, b"user.test", b"")
                    os.setxattr(fp.fileno(), b"user.test", b"")
                    # Kernels < 2.6.39 don't respect setxattr flags.
                    kernel_version = platform.release()
                    # BUG FIX: the pattern was the non-raw string
                    # "2.6.(\d{1,2})" — an invalid \d escape and unescaped
                    # dots that match any character.  Use a raw, escaped
                    # pattern instead.
                    m = re.match(r"2\.6\.(\d{1,2})", kernel_version)
                    can = m is None or int(m.group(1)) >= 39
                except OSError:
                    can = False
        finally:
            unlink(TESTFN)
            unlink(tmp_name)
    _can_xattr = can
    return can

def skip_unless_xattr(test):
    """Skip decorator for tests that require functional extended attributes"""
    ok = can_xattr()
    msg = "no non-broken extended attribute support"
    return test if ok else unittest.skip(msg)(test)


if sys.platform.startswith('win'):
    @contextlib.contextmanager
    def suppress_crash_popup():
        """Disable Windows Error Reporting dialogs using SetErrorMode."""
        # see http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621%28v=vs.85%29.aspx
        # GetErrorMode is not available on Windows XP and Windows Server 2003,
        # but SetErrorMode returns the previous value, so we can use that
        import ctypes
        k32 = ctypes.windll.kernel32
        SEM_NOGPFAULTERRORBOX = 0x02
        old_error_mode = k32.SetErrorMode(SEM_NOGPFAULTERRORBOX)
        k32.SetErrorMode(old_error_mode | SEM_NOGPFAULTERRORBOX)
        try:
            yield
        finally:
            k32.SetErrorMode(old_error_mode)
else:
    # this is a no-op for other platforms
    @contextlib.contextmanager
    def suppress_crash_popup():
        yield


def patch(test_instance, object_to_patch, attr_name, new_value):
    """Override 'object_to_patch'.'attr_name' with 'new_value'.

    Also, add a cleanup procedure to 'test_instance' to restore
    'object_to_patch' value for 'attr_name'.
    The 'attr_name' should be a valid attribute for 'object_to_patch'.

    """
    # check that 'attr_name' is a real attribute for 'object_to_patch'
    # will raise AttributeError if it does not exist
    getattr(object_to_patch, attr_name)

    # keep a copy of the old value
    attr_is_local = False
    try:
        old_value = object_to_patch.__dict__[attr_name]
    except (AttributeError, KeyError):
        # Attribute lives on a parent class (or the object has no __dict__);
        # cleanup must delete the override rather than restore a value.
        old_value = getattr(object_to_patch, attr_name, None)
    else:
        attr_is_local = True

    # restore the value when the test is done
    def cleanup():
        if attr_is_local:
            setattr(object_to_patch, attr_name, old_value)
        else:
            delattr(object_to_patch, attr_name)

    test_instance.addCleanup(cleanup)

    # actually override the attribute
    setattr(object_to_patch, attr_name, new_value)
+""" + +import sys +if sys.version_info >= (2, 7, 2): + from functools import total_ordering +else: + def total_ordering(cls): + """Class decorator that fills in missing ordering methods""" + convert = { + '__lt__': [('__gt__', lambda self, other: not (self < other or self == other)), + ('__le__', lambda self, other: self < other or self == other), + ('__ge__', lambda self, other: not self < other)], + '__le__': [('__ge__', lambda self, other: not self <= other or self == other), + ('__lt__', lambda self, other: self <= other and not self == other), + ('__gt__', lambda self, other: not self <= other)], + '__gt__': [('__lt__', lambda self, other: not (self > other or self == other)), + ('__ge__', lambda self, other: self > other or self == other), + ('__le__', lambda self, other: not self > other)], + '__ge__': [('__le__', lambda self, other: (not self >= other) or self == other), + ('__gt__', lambda self, other: self >= other and not self == other), + ('__lt__', lambda self, other: not self >= other)] + } + roots = set(dir(cls)) & set(convert) + if not roots: + raise ValueError('must define at least one ordering operation: < > <= >=') + root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__ + for opname, opfunc in convert[root]: + if opname not in roots: + opfunc.__name__ = opname + opfunc.__doc__ = getattr(int, opname).__doc__ + setattr(cls, opname, opfunc) + return cls diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/error.py b/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/error.py new file mode 100644 index 000000000..a473e4453 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/error.py @@ -0,0 +1,75 @@ +"""Exception classes 
from __future__ import absolute_import, division, unicode_literals
from future import standard_library

from future.backports.urllib import response as urllib_response


__all__ = ['URLError', 'HTTPError', 'ContentTooShortError']


# do these error classes make sense?
# make sure all of the IOError stuff is overridden.  we just want to be
# subtypes.

class URLError(IOError):
    # URLError is a sub-type of IOError, but it doesn't share any of
    # the implementation.  need to override __init__ and __str__.
    # It sets self.args for compatibility with other EnvironmentError
    # subclasses, but args doesn't have the typical format with errno in
    # slot 0 and strerror in slot 1.  This may be better than nothing.
    def __init__(self, reason, filename=None):
        self.args = reason,
        self.reason = reason
        if filename is not None:
            self.filename = filename

    def __str__(self):
        # BUG FIX: the format string had been reduced to '' (the angle
        # brackets were evidently lost to a markup stripper), making
        # ``'' % self.reason`` raise TypeError.  Restore the upstream
        # CPython/python-future message.
        return '<urlopen error %s>' % self.reason

class HTTPError(URLError, urllib_response.addinfourl):
    """Raised when HTTP error occurs, but also acts like non-error return"""
    __super_init = urllib_response.addinfourl.__init__

    def __init__(self, url, code, msg, hdrs, fp):
        self.code = code
        self.msg = msg
        self.hdrs = hdrs
        self.fp = fp
        self.filename = url
        # The addinfourl classes depend on fp being a valid file
        # object.  In some cases, the HTTPError may not have a valid
        # file object.  If this happens, the simplest workaround is to
        # not initialize the base classes.
        if fp is not None:
            self.__super_init(fp, hdrs, url, code)

    def __str__(self):
        return 'HTTP Error %s: %s' % (self.code, self.msg)

    # since URLError specifies a .reason attribute, HTTPError should also
    # provide this attribute. See issue13211 for discussion.
    @property
    def reason(self):
        return self.msg

    def info(self):
        return self.hdrs


# exception raised when downloaded size does not match content-length
class ContentTooShortError(URLError):
    def __init__(self, message, content):
        URLError.__init__(self, message)
        self.content = content
The urlparse module is +currently not entirely compliant with this RFC due to defacto +scenarios for parsing, and for backward compatibility purposes, some +parsing quirks from older RFCs are retained. The testcases in +test_urlparse.py provides a good indicator of parsing behavior. +""" +from __future__ import absolute_import, division, unicode_literals +from future.builtins import bytes, chr, dict, int, range, str +from future.utils import raise_with_traceback + +import re +import sys +import collections + +__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag", + "urlsplit", "urlunsplit", "urlencode", "parse_qs", + "parse_qsl", "quote", "quote_plus", "quote_from_bytes", + "unquote", "unquote_plus", "unquote_to_bytes"] + +# A classification of schemes ('' means apply by default) +uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap', + 'wais', 'file', 'https', 'shttp', 'mms', + 'prospero', 'rtsp', 'rtspu', '', 'sftp', + 'svn', 'svn+ssh'] +uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet', + 'imap', 'wais', 'file', 'mms', 'https', 'shttp', + 'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '', + 'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh'] +uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap', + 'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips', + 'mms', '', 'sftp', 'tel'] + +# These are not actually used anymore, but should stay for backwards +# compatibility. (They are undocumented, but have a public-looking name.) 
+non_hierarchical = ['gopher', 'hdl', 'mailto', 'news', + 'telnet', 'wais', 'imap', 'snews', 'sip', 'sips'] +uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms', + 'gopher', 'rtsp', 'rtspu', 'sip', 'sips', ''] +uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news', + 'nntp', 'wais', 'https', 'shttp', 'snews', + 'file', 'prospero', ''] + +# Characters valid in scheme names +scheme_chars = ('abcdefghijklmnopqrstuvwxyz' + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + '0123456789' + '+-.') + +# XXX: Consider replacing with functools.lru_cache +MAX_CACHE_SIZE = 20 +_parse_cache = {} + +def clear_cache(): + """Clear the parse cache and the quoters cache.""" + _parse_cache.clear() + _safe_quoters.clear() + + +# Helpers for bytes handling +# For 3.2, we deliberately require applications that +# handle improperly quoted URLs to do their own +# decoding and encoding. If valid use cases are +# presented, we may relax this by using latin-1 +# decoding internally for 3.3 +_implicit_encoding = 'ascii' +_implicit_errors = 'strict' + +def _noop(obj): + return obj + +def _encode_result(obj, encoding=_implicit_encoding, + errors=_implicit_errors): + return obj.encode(encoding, errors) + +def _decode_args(args, encoding=_implicit_encoding, + errors=_implicit_errors): + return tuple(x.decode(encoding, errors) if x else '' for x in args) + +def _coerce_args(*args): + # Invokes decode if necessary to create str args + # and returns the coerced inputs along with + # an appropriate result coercion function + # - noop for str inputs + # - encoding function otherwise + str_input = isinstance(args[0], str) + for arg in args[1:]: + # We special-case the empty string to support the + # "scheme=''" default argument to some functions + if arg and isinstance(arg, str) != str_input: + raise TypeError("Cannot mix str and non-str arguments") + if str_input: + return args + (_noop,) + return _decode_args(args) + (_encode_result,) + +# Result objects are more helpful than simple tuples +class 
_ResultMixinStr(object): + """Standard approach to encoding parsed results from str to bytes""" + __slots__ = () + + def encode(self, encoding='ascii', errors='strict'): + return self._encoded_counterpart(*(x.encode(encoding, errors) for x in self)) + + +class _ResultMixinBytes(object): + """Standard approach to decoding parsed results from bytes to str""" + __slots__ = () + + def decode(self, encoding='ascii', errors='strict'): + return self._decoded_counterpart(*(x.decode(encoding, errors) for x in self)) + + +class _NetlocResultMixinBase(object): + """Shared methods for the parsed result objects containing a netloc element""" + __slots__ = () + + @property + def username(self): + return self._userinfo[0] + + @property + def password(self): + return self._userinfo[1] + + @property + def hostname(self): + hostname = self._hostinfo[0] + if not hostname: + hostname = None + elif hostname is not None: + hostname = hostname.lower() + return hostname + + @property + def port(self): + port = self._hostinfo[1] + if port is not None: + port = int(port, 10) + # Return None on an illegal port + if not ( 0 <= port <= 65535): + return None + return port + + +class _NetlocResultMixinStr(_NetlocResultMixinBase, _ResultMixinStr): + __slots__ = () + + @property + def _userinfo(self): + netloc = self.netloc + userinfo, have_info, hostinfo = netloc.rpartition('@') + if have_info: + username, have_password, password = userinfo.partition(':') + if not have_password: + password = None + else: + username = password = None + return username, password + + @property + def _hostinfo(self): + netloc = self.netloc + _, _, hostinfo = netloc.rpartition('@') + _, have_open_br, bracketed = hostinfo.partition('[') + if have_open_br: + hostname, _, port = bracketed.partition(']') + _, have_port, port = port.partition(':') + else: + hostname, have_port, port = hostinfo.partition(':') + if not have_port: + port = None + return hostname, port + + +class _NetlocResultMixinBytes(_NetlocResultMixinBase, 
_ResultMixinBytes): + __slots__ = () + + @property + def _userinfo(self): + netloc = self.netloc + userinfo, have_info, hostinfo = netloc.rpartition(b'@') + if have_info: + username, have_password, password = userinfo.partition(b':') + if not have_password: + password = None + else: + username = password = None + return username, password + + @property + def _hostinfo(self): + netloc = self.netloc + _, _, hostinfo = netloc.rpartition(b'@') + _, have_open_br, bracketed = hostinfo.partition(b'[') + if have_open_br: + hostname, _, port = bracketed.partition(b']') + _, have_port, port = port.partition(b':') + else: + hostname, have_port, port = hostinfo.partition(b':') + if not have_port: + port = None + return hostname, port + + +from collections import namedtuple + +_DefragResultBase = namedtuple('DefragResult', 'url fragment') +_SplitResultBase = namedtuple('SplitResult', 'scheme netloc path query fragment') +_ParseResultBase = namedtuple('ParseResult', 'scheme netloc path params query fragment') + +# For backwards compatibility, alias _NetlocResultMixinStr +# ResultBase is no longer part of the documented API, but it is +# retained since deprecating it isn't worth the hassle +ResultBase = _NetlocResultMixinStr + +# Structured result objects for string data +class DefragResult(_DefragResultBase, _ResultMixinStr): + __slots__ = () + def geturl(self): + if self.fragment: + return self.url + '#' + self.fragment + else: + return self.url + +class SplitResult(_SplitResultBase, _NetlocResultMixinStr): + __slots__ = () + def geturl(self): + return urlunsplit(self) + +class ParseResult(_ParseResultBase, _NetlocResultMixinStr): + __slots__ = () + def geturl(self): + return urlunparse(self) + +# Structured result objects for bytes data +class DefragResultBytes(_DefragResultBase, _ResultMixinBytes): + __slots__ = () + def geturl(self): + if self.fragment: + return self.url + b'#' + self.fragment + else: + return self.url + +class SplitResultBytes(_SplitResultBase, 
_NetlocResultMixinBytes): + __slots__ = () + def geturl(self): + return urlunsplit(self) + +class ParseResultBytes(_ParseResultBase, _NetlocResultMixinBytes): + __slots__ = () + def geturl(self): + return urlunparse(self) + +# Set up the encode/decode result pairs +def _fix_result_transcoding(): + _result_pairs = ( + (DefragResult, DefragResultBytes), + (SplitResult, SplitResultBytes), + (ParseResult, ParseResultBytes), + ) + for _decoded, _encoded in _result_pairs: + _decoded._encoded_counterpart = _encoded + _encoded._decoded_counterpart = _decoded + +_fix_result_transcoding() +del _fix_result_transcoding + +def urlparse(url, scheme='', allow_fragments=True): + """Parse a URL into 6 components: + :///;?# + Return a 6-tuple: (scheme, netloc, path, params, query, fragment). + Note that we don't break the components up in smaller bits + (e.g. netloc is a single string) and we don't expand % escapes.""" + url, scheme, _coerce_result = _coerce_args(url, scheme) + splitresult = urlsplit(url, scheme, allow_fragments) + scheme, netloc, url, query, fragment = splitresult + if scheme in uses_params and ';' in url: + url, params = _splitparams(url) + else: + params = '' + result = ParseResult(scheme, netloc, url, params, query, fragment) + return _coerce_result(result) + +def _splitparams(url): + if '/' in url: + i = url.find(';', url.rfind('/')) + if i < 0: + return url, '' + else: + i = url.find(';') + return url[:i], url[i+1:] + +def _splitnetloc(url, start=0): + delim = len(url) # position of end of domain part of url, default is end + for c in '/?#': # look for delimiters; the order is NOT important + wdelim = url.find(c, start) # find first of this delim + if wdelim >= 0: # if found + delim = min(delim, wdelim) # use earliest delim position + return url[start:delim], url[delim:] # return (domain, rest) + +def urlsplit(url, scheme='', allow_fragments=True): + """Parse a URL into 5 components: + :///?# + Return a 5-tuple: (scheme, netloc, path, query, fragment). 
+ Note that we don't break the components up in smaller bits + (e.g. netloc is a single string) and we don't expand % escapes.""" + url, scheme, _coerce_result = _coerce_args(url, scheme) + allow_fragments = bool(allow_fragments) + key = url, scheme, allow_fragments, type(url), type(scheme) + cached = _parse_cache.get(key, None) + if cached: + return _coerce_result(cached) + if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth + clear_cache() + netloc = query = fragment = '' + i = url.find(':') + if i > 0: + if url[:i] == 'http': # optimize the common case + scheme = url[:i].lower() + url = url[i+1:] + if url[:2] == '//': + netloc, url = _splitnetloc(url, 2) + if (('[' in netloc and ']' not in netloc) or + (']' in netloc and '[' not in netloc)): + raise ValueError("Invalid IPv6 URL") + if allow_fragments and '#' in url: + url, fragment = url.split('#', 1) + if '?' in url: + url, query = url.split('?', 1) + v = SplitResult(scheme, netloc, url, query, fragment) + _parse_cache[key] = v + return _coerce_result(v) + for c in url[:i]: + if c not in scheme_chars: + break + else: + # make sure "url" is not actually a port number (in which case + # "scheme" is really part of the path) + rest = url[i+1:] + if not rest or any(c not in '0123456789' for c in rest): + # not a port number + scheme, url = url[:i].lower(), rest + + if url[:2] == '//': + netloc, url = _splitnetloc(url, 2) + if (('[' in netloc and ']' not in netloc) or + (']' in netloc and '[' not in netloc)): + raise ValueError("Invalid IPv6 URL") + if allow_fragments and '#' in url: + url, fragment = url.split('#', 1) + if '?' in url: + url, query = url.split('?', 1) + v = SplitResult(scheme, netloc, url, query, fragment) + _parse_cache[key] = v + return _coerce_result(v) + +def urlunparse(components): + """Put a parsed URL back together again. This may result in a + slightly different, but equivalent URL, if the URL that was parsed + originally had redundant delimiters, e.g. a ? 
with an empty query + (the draft states that these are equivalent).""" + scheme, netloc, url, params, query, fragment, _coerce_result = ( + _coerce_args(*components)) + if params: + url = "%s;%s" % (url, params) + return _coerce_result(urlunsplit((scheme, netloc, url, query, fragment))) + +def urlunsplit(components): + """Combine the elements of a tuple as returned by urlsplit() into a + complete URL as a string. The data argument can be any five-item iterable. + This may result in a slightly different, but equivalent URL, if the URL that + was parsed originally had unnecessary delimiters (for example, a ? with an + empty query; the RFC states that these are equivalent).""" + scheme, netloc, url, query, fragment, _coerce_result = ( + _coerce_args(*components)) + if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'): + if url and url[:1] != '/': url = '/' + url + url = '//' + (netloc or '') + url + if scheme: + url = scheme + ':' + url + if query: + url = url + '?' + query + if fragment: + url = url + '#' + fragment + return _coerce_result(url) + +def urljoin(base, url, allow_fragments=True): + """Join a base URL and a possibly relative URL to form an absolute + interpretation of the latter.""" + if not base: + return url + if not url: + return base + base, url, _coerce_result = _coerce_args(base, url) + bscheme, bnetloc, bpath, bparams, bquery, bfragment = \ + urlparse(base, '', allow_fragments) + scheme, netloc, path, params, query, fragment = \ + urlparse(url, bscheme, allow_fragments) + if scheme != bscheme or scheme not in uses_relative: + return _coerce_result(url) + if scheme in uses_netloc: + if netloc: + return _coerce_result(urlunparse((scheme, netloc, path, + params, query, fragment))) + netloc = bnetloc + if path[:1] == '/': + return _coerce_result(urlunparse((scheme, netloc, path, + params, query, fragment))) + if not path and not params: + path = bpath + params = bparams + if not query: + query = bquery + return 
_coerce_result(urlunparse((scheme, netloc, path, + params, query, fragment))) + segments = bpath.split('/')[:-1] + path.split('/') + # XXX The stuff below is bogus in various ways... + if segments[-1] == '.': + segments[-1] = '' + while '.' in segments: + segments.remove('.') + while 1: + i = 1 + n = len(segments) - 1 + while i < n: + if (segments[i] == '..' + and segments[i-1] not in ('', '..')): + del segments[i-1:i+1] + break + i = i+1 + else: + break + if segments == ['', '..']: + segments[-1] = '' + elif len(segments) >= 2 and segments[-1] == '..': + segments[-2:] = [''] + return _coerce_result(urlunparse((scheme, netloc, '/'.join(segments), + params, query, fragment))) + +def urldefrag(url): + """Removes any existing fragment from URL. + + Returns a tuple of the defragmented URL and the fragment. If + the URL contained no fragments, the second element is the + empty string. + """ + url, _coerce_result = _coerce_args(url) + if '#' in url: + s, n, p, a, q, frag = urlparse(url) + defrag = urlunparse((s, n, p, a, q, '')) + else: + frag = '' + defrag = url + return _coerce_result(DefragResult(defrag, frag)) + +_hexdig = '0123456789ABCDEFabcdef' +_hextobyte = dict(((a + b).encode(), bytes([int(a + b, 16)])) + for a in _hexdig for b in _hexdig) + +def unquote_to_bytes(string): + """unquote_to_bytes('abc%20def') -> b'abc def'.""" + # Note: strings are encoded as UTF-8. This is only an issue if it contains + # unescaped non-ASCII characters, which URIs should not. + if not string: + # Is it a string-like object? 
+ string.split + return bytes(b'') + if isinstance(string, str): + string = string.encode('utf-8') + ### For Python-Future: + # It is already a byte-string object, but force it to be newbytes here on + # Py2: + string = bytes(string) + ### + bits = string.split(b'%') + if len(bits) == 1: + return string + res = [bits[0]] + append = res.append + for item in bits[1:]: + try: + append(_hextobyte[item[:2]]) + append(item[2:]) + except KeyError: + append(b'%') + append(item) + return bytes(b'').join(res) + +_asciire = re.compile('([\x00-\x7f]+)') + +def unquote(string, encoding='utf-8', errors='replace'): + """Replace %xx escapes by their single-character equivalent. The optional + encoding and errors parameters specify how to decode percent-encoded + sequences into Unicode characters, as accepted by the bytes.decode() + method. + By default, percent-encoded sequences are decoded with UTF-8, and invalid + sequences are replaced by a placeholder character. + + unquote('abc%20def') -> 'abc def'. + """ + if '%' not in string: + string.split + return string + if encoding is None: + encoding = 'utf-8' + if errors is None: + errors = 'replace' + bits = _asciire.split(string) + res = [bits[0]] + append = res.append + for i in range(1, len(bits), 2): + append(unquote_to_bytes(bits[i]).decode(encoding, errors)) + append(bits[i + 1]) + return ''.join(res) + +def parse_qs(qs, keep_blank_values=False, strict_parsing=False, + encoding='utf-8', errors='replace'): + """Parse a query given as a string argument. + + Arguments: + + qs: percent-encoded query string to be parsed + + keep_blank_values: flag indicating whether blank values in + percent-encoded queries should be treated as blank strings. + A true value indicates that blanks should be retained as + blank strings. The default false value indicates that + blank values are to be ignored and treated as if they were + not included. + + strict_parsing: flag indicating what to do with parsing errors. 
+ If false (the default), errors are silently ignored. + If true, errors raise a ValueError exception. + + encoding and errors: specify how to decode percent-encoded sequences + into Unicode characters, as accepted by the bytes.decode() method. + """ + parsed_result = {} + pairs = parse_qsl(qs, keep_blank_values, strict_parsing, + encoding=encoding, errors=errors) + for name, value in pairs: + if name in parsed_result: + parsed_result[name].append(value) + else: + parsed_result[name] = [value] + return parsed_result + +def parse_qsl(qs, keep_blank_values=False, strict_parsing=False, + encoding='utf-8', errors='replace'): + """Parse a query given as a string argument. + + Arguments: + + qs: percent-encoded query string to be parsed + + keep_blank_values: flag indicating whether blank values in + percent-encoded queries should be treated as blank strings. A + true value indicates that blanks should be retained as blank + strings. The default false value indicates that blank values + are to be ignored and treated as if they were not included. + + strict_parsing: flag indicating what to do with parsing errors. If + false (the default), errors are silently ignored. If true, + errors raise a ValueError exception. + + encoding and errors: specify how to decode percent-encoded sequences + into Unicode characters, as accepted by the bytes.decode() method. + + Returns a list, as G-d intended. 
+ """ + qs, _coerce_result = _coerce_args(qs) + pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')] + r = [] + for name_value in pairs: + if not name_value and not strict_parsing: + continue + nv = name_value.split('=', 1) + if len(nv) != 2: + if strict_parsing: + raise ValueError("bad query field: %r" % (name_value,)) + # Handle case of a control-name with no equal sign + if keep_blank_values: + nv.append('') + else: + continue + if len(nv[1]) or keep_blank_values: + name = nv[0].replace('+', ' ') + name = unquote(name, encoding=encoding, errors=errors) + name = _coerce_result(name) + value = nv[1].replace('+', ' ') + value = unquote(value, encoding=encoding, errors=errors) + value = _coerce_result(value) + r.append((name, value)) + return r + +def unquote_plus(string, encoding='utf-8', errors='replace'): + """Like unquote(), but also replace plus signs by spaces, as required for + unquoting HTML form values. + + unquote_plus('%7e/abc+def') -> '~/abc def' + """ + string = string.replace('+', ' ') + return unquote(string, encoding, errors) + +_ALWAYS_SAFE = frozenset(bytes(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + b'abcdefghijklmnopqrstuvwxyz' + b'0123456789' + b'_.-')) +_ALWAYS_SAFE_BYTES = bytes(_ALWAYS_SAFE) +_safe_quoters = {} + +class Quoter(collections.defaultdict): + """A mapping from bytes (in range(0,256)) to strings. + + String values are percent-encoded byte values, unless the key < 128, and + in the "safe" set (either the specified safe set, or default set). + """ + # Keeps a cache internally, using defaultdict, for efficiency (lookups + # of cached keys don't call Python code at all). + def __init__(self, safe): + """safe: bytes object.""" + self.safe = _ALWAYS_SAFE.union(bytes(safe)) + + def __repr__(self): + # Without this, will just display as a defaultdict + return "" % dict(self) + + def __missing__(self, b): + # Handle a cache miss. Store quoted string in cache and return. 
+ res = chr(b) if b in self.safe else '%{0:02X}'.format(b) + self[b] = res + return res + +def quote(string, safe='/', encoding=None, errors=None): + """quote('abc def') -> 'abc%20def' + + Each part of a URL, e.g. the path info, the query, etc., has a + different set of reserved characters that must be quoted. + + RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists + the following reserved characters. + + reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | + "$" | "," + + Each of these characters is reserved in some component of a URL, + but not necessarily in all of them. + + By default, the quote function is intended for quoting the path + section of a URL. Thus, it will not encode '/'. This character + is reserved, but in typical usage the quote function is being + called on a path where the existing slash characters are used as + reserved characters. + + string and safe may be either str or bytes objects. encoding must + not be specified if string is a str. + + The optional encoding and errors parameters specify how to deal with + non-ASCII characters, as accepted by the str.encode method. + By default, encoding='utf-8' (characters are encoded with UTF-8), and + errors='strict' (unsupported characters raise a UnicodeEncodeError). + """ + if isinstance(string, str): + if not string: + return string + if encoding is None: + encoding = 'utf-8' + if errors is None: + errors = 'strict' + string = string.encode(encoding, errors) + else: + if encoding is not None: + raise TypeError("quote() doesn't support 'encoding' for bytes") + if errors is not None: + raise TypeError("quote() doesn't support 'errors' for bytes") + return quote_from_bytes(string, safe) + +def quote_plus(string, safe='', encoding=None, errors=None): + """Like quote(), but also replace ' ' with '+', as required for quoting + HTML form values. Plus signs in the original string are escaped unless + they are included in safe. It also does not have safe default to '/'. 
+ """ + # Check if ' ' in string, where string may either be a str or bytes. If + # there are no spaces, the regular quote will produce the right answer. + if ((isinstance(string, str) and ' ' not in string) or + (isinstance(string, bytes) and b' ' not in string)): + return quote(string, safe, encoding, errors) + if isinstance(safe, str): + space = str(' ') + else: + space = bytes(b' ') + string = quote(string, safe + space, encoding, errors) + return string.replace(' ', '+') + +def quote_from_bytes(bs, safe='/'): + """Like quote(), but accepts a bytes object rather than a str, and does + not perform string-to-bytes encoding. It always returns an ASCII string. + quote_from_bytes(b'abc def\x3f') -> 'abc%20def%3f' + """ + if not isinstance(bs, (bytes, bytearray)): + raise TypeError("quote_from_bytes() expected bytes") + if not bs: + return str('') + ### For Python-Future: + bs = bytes(bs) + ### + if isinstance(safe, str): + # Normalize 'safe' by converting to bytes and removing non-ASCII chars + safe = str(safe).encode('ascii', 'ignore') + else: + ### For Python-Future: + safe = bytes(safe) + ### + safe = bytes([c for c in safe if c < 128]) + if not bs.rstrip(_ALWAYS_SAFE_BYTES + safe): + return bs.decode() + try: + quoter = _safe_quoters[safe] + except KeyError: + _safe_quoters[safe] = quoter = Quoter(safe).__getitem__ + return str('').join([quoter(char) for char in bs]) + +def urlencode(query, doseq=False, safe='', encoding=None, errors=None): + """Encode a sequence of two-element tuples or dictionary into a URL query string. + + If any values in the query arg are sequences and doseq is true, each + sequence element is converted to a separate parameter. + + If the query arg is a sequence of two-element tuples, the order of the + parameters in the output will match the order of parameters in the + input. + + The query arg may be either a string or a bytes type. 
When query arg is a + string, the safe, encoding and error parameters are sent the quote_plus for + encoding. + """ + + if hasattr(query, "items"): + query = query.items() + else: + # It's a bother at times that strings and string-like objects are + # sequences. + try: + # non-sequence items should not work with len() + # non-empty strings will fail this + if len(query) and not isinstance(query[0], tuple): + raise TypeError + # Zero-length sequences of all types will get here and succeed, + # but that's a minor nit. Since the original implementation + # allowed empty dicts that type of behavior probably should be + # preserved for consistency + except TypeError: + ty, va, tb = sys.exc_info() + raise_with_traceback(TypeError("not a valid non-string sequence " + "or mapping object"), tb) + + l = [] + if not doseq: + for k, v in query: + if isinstance(k, bytes): + k = quote_plus(k, safe) + else: + k = quote_plus(str(k), safe, encoding, errors) + + if isinstance(v, bytes): + v = quote_plus(v, safe) + else: + v = quote_plus(str(v), safe, encoding, errors) + l.append(k + '=' + v) + else: + for k, v in query: + if isinstance(k, bytes): + k = quote_plus(k, safe) + else: + k = quote_plus(str(k), safe, encoding, errors) + + if isinstance(v, bytes): + v = quote_plus(v, safe) + l.append(k + '=' + v) + elif isinstance(v, str): + v = quote_plus(v, safe, encoding, errors) + l.append(k + '=' + v) + else: + try: + # Is this a sufficient test for sequence-ness? 
+ x = len(v) + except TypeError: + # not a sequence + v = quote_plus(str(v), safe, encoding, errors) + l.append(k + '=' + v) + else: + # loop over the sequence + for elt in v: + if isinstance(elt, bytes): + elt = quote_plus(elt, safe) + else: + elt = quote_plus(str(elt), safe, encoding, errors) + l.append(k + '=' + elt) + return str('&').join(l) + +# Utilities to parse URLs (most of these return None for missing parts): +# unwrap('') --> 'type://host/path' +# splittype('type:opaquestring') --> 'type', 'opaquestring' +# splithost('//host[:port]/path') --> 'host[:port]', '/path' +# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]' +# splitpasswd('user:passwd') -> 'user', 'passwd' +# splitport('host:port') --> 'host', 'port' +# splitquery('/path?query') --> '/path', 'query' +# splittag('/path#tag') --> '/path', 'tag' +# splitattr('/path;attr1=value1;attr2=value2;...') -> +# '/path', ['attr1=value1', 'attr2=value2', ...] +# splitvalue('attr=value') --> 'attr', 'value' +# urllib.parse.unquote('abc%20def') -> 'abc def' +# quote('abc def') -> 'abc%20def') + +def to_bytes(url): + """to_bytes(u"URL") --> 'URL'.""" + # Most URL schemes require ASCII. If that changes, the conversion + # can be relaxed. 
+ # XXX get rid of to_bytes() + if isinstance(url, str): + try: + url = url.encode("ASCII").decode() + except UnicodeError: + raise UnicodeError("URL " + repr(url) + + " contains non-ASCII characters") + return url + +def unwrap(url): + """unwrap('') --> 'type://host/path'.""" + url = str(url).strip() + if url[:1] == '<' and url[-1:] == '>': + url = url[1:-1].strip() + if url[:4] == 'URL:': url = url[4:].strip() + return url + +_typeprog = None +def splittype(url): + """splittype('type:opaquestring') --> 'type', 'opaquestring'.""" + global _typeprog + if _typeprog is None: + import re + _typeprog = re.compile('^([^/:]+):') + + match = _typeprog.match(url) + if match: + scheme = match.group(1) + return scheme.lower(), url[len(scheme) + 1:] + return None, url + +_hostprog = None +def splithost(url): + """splithost('//host[:port]/path') --> 'host[:port]', '/path'.""" + global _hostprog + if _hostprog is None: + import re + _hostprog = re.compile('^//([^/?]*)(.*)$') + + match = _hostprog.match(url) + if match: + host_port = match.group(1) + path = match.group(2) + if path and not path.startswith('/'): + path = '/' + path + return host_port, path + return None, url + +_userprog = None +def splituser(host): + """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + global _userprog + if _userprog is None: + import re + _userprog = re.compile('^(.*)@(.*)$') + + match = _userprog.match(host) + if match: return match.group(1, 2) + return None, host + +_passwdprog = None +def splitpasswd(user): + """splitpasswd('user:passwd') -> 'user', 'passwd'.""" + global _passwdprog + if _passwdprog is None: + import re + _passwdprog = re.compile('^([^:]*):(.*)$',re.S) + + match = _passwdprog.match(user) + if match: return match.group(1, 2) + return user, None + +# splittag('/path#tag') --> '/path', 'tag' +_portprog = None +def splitport(host): + """splitport('host:port') --> 'host', 'port'.""" + global _portprog + if _portprog is None: + import re + _portprog = 
re.compile('^(.*):([0-9]+)$') + + match = _portprog.match(host) + if match: return match.group(1, 2) + return host, None + +_nportprog = None +def splitnport(host, defport=-1): + """Split host and port, returning numeric port. + Return given default port if no ':' found; defaults to -1. + Return numerical port if a valid number are found after ':'. + Return None if ':' but not a valid number.""" + global _nportprog + if _nportprog is None: + import re + _nportprog = re.compile('^(.*):(.*)$') + + match = _nportprog.match(host) + if match: + host, port = match.group(1, 2) + try: + if not port: raise ValueError("no digits") + nport = int(port) + except ValueError: + nport = None + return host, nport + return host, defport + +_queryprog = None +def splitquery(url): + """splitquery('/path?query') --> '/path', 'query'.""" + global _queryprog + if _queryprog is None: + import re + _queryprog = re.compile('^(.*)\?([^?]*)$') + + match = _queryprog.match(url) + if match: return match.group(1, 2) + return url, None + +_tagprog = None +def splittag(url): + """splittag('/path#tag') --> '/path', 'tag'.""" + global _tagprog + if _tagprog is None: + import re + _tagprog = re.compile('^(.*)#([^#]*)$') + + match = _tagprog.match(url) + if match: return match.group(1, 2) + return url, None + +def splitattr(url): + """splitattr('/path;attr1=value1;attr2=value2;...') -> + '/path', ['attr1=value1', 'attr2=value2', ...].""" + words = url.split(';') + return words[0], words[1:] + +_valueprog = None +def splitvalue(attr): + """splitvalue('attr=value') --> 'attr', 'value'.""" + global _valueprog + if _valueprog is None: + import re + _valueprog = re.compile('^([^=]*)=(.*)$') + + match = _valueprog.match(attr) + if match: return match.group(1, 2) + return attr, None diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/request.py b/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/request.py new file mode 100644 index 000000000..b1545ca0f 
--- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/request.py @@ -0,0 +1,2641 @@ +""" +Ported using Python-Future from the Python 3.3 standard library. + +An extensible library for opening URLs using a variety of protocols + +The simplest way to use this module is to call the urlopen function, +which accepts a string containing a URL or a Request object (described +below). It opens the URL and returns the results as file-like +object; the returned object has some extra methods described below. + +The OpenerDirector manages a collection of Handler objects that do +all the actual work. Each Handler implements a particular protocol or +option. The OpenerDirector is a composite object that invokes the +Handlers needed to open the requested URL. For example, the +HTTPHandler performs HTTP GET and POST requests and deals with +non-error returns. The HTTPRedirectHandler automatically deals with +HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler +deals with digest authentication. + +urlopen(url, data=None) -- Basic usage is the same as original +urllib. pass the url and optionally data to post to an HTTP URL, and +get a file-like object back. One difference is that you can also pass +a Request instance instead of URL. Raises a URLError (subclass of +IOError); for HTTP errors, raises an HTTPError, which can also be +treated as a valid response. + +build_opener -- Function that creates a new OpenerDirector instance. +Will install the default handlers. Accepts one or more Handlers as +arguments, either instances or Handler classes that it will +instantiate. If one of the argument is a subclass of the default +handler, the argument will be installed instead of the default. + +install_opener -- Installs a new opener as the default opener. + +objects of interest: + +OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages +the Handler classes, while dealing with requests and responses. 
+ +Request -- An object that encapsulates the state of a request. The +state can be as simple as the URL. It can also include extra HTTP +headers, e.g. a User-Agent. + +BaseHandler -- + +internals: +BaseHandler and parent +_call_chain conventions + +Example usage: + +import urllib.request + +# set up authentication info +authinfo = urllib.request.HTTPBasicAuthHandler() +authinfo.add_password(realm='PDQ Application', + uri='https://mahler:8092/site-updates.py', + user='klem', + passwd='geheim$parole') + +proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"}) + +# build a new opener that adds authentication and caching FTP handlers +opener = urllib.request.build_opener(proxy_support, authinfo, + urllib.request.CacheFTPHandler) + +# install it +urllib.request.install_opener(opener) + +f = urllib.request.urlopen('http://www.python.org/') +""" + +# XXX issues: +# If an authentication error handler that tries to perform +# authentication for some reason but fails, how should the error be +# signalled? The client needs to know the HTTP error code. But if +# the handler knows that the problem was, e.g., that it didn't know +# that hash algo that requested in the challenge, it would be good to +# pass that information along to the client, too. +# ftp errors aren't handled cleanly +# check digest against correct (i.e. 
non-apache) implementation + +# Possible extensions: +# complex proxies XXX not sure what exactly was meant by this +# abstract factory for opener + +from __future__ import absolute_import, division, print_function, unicode_literals +from future.builtins import bytes, dict, filter, input, int, map, open, str +from future.utils import PY2, PY3, raise_with_traceback + +import base64 +import bisect +import hashlib +import array + +from future.backports import email +from future.backports.http import client as http_client +from .error import URLError, HTTPError, ContentTooShortError +from .parse import ( + urlparse, urlsplit, urljoin, unwrap, quote, unquote, + splittype, splithost, splitport, splituser, splitpasswd, + splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse) +from .response import addinfourl, addclosehook + +import io +import os +import posixpath +import re +import socket +import sys +import time +import collections +import tempfile +import contextlib +import warnings + +# check for SSL +try: + import ssl + # Not available in the SSL module in Py2: + from ssl import SSLContext +except ImportError: + _have_ssl = False +else: + _have_ssl = True + +__all__ = [ + # Classes + 'Request', 'OpenerDirector', 'BaseHandler', 'HTTPDefaultErrorHandler', + 'HTTPRedirectHandler', 'HTTPCookieProcessor', 'ProxyHandler', + 'HTTPPasswordMgr', 'HTTPPasswordMgrWithDefaultRealm', + 'AbstractBasicAuthHandler', 'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler', + 'AbstractDigestAuthHandler', 'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler', + 'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler', + 'UnknownHandler', 'HTTPErrorProcessor', + # Functions + 'urlopen', 'install_opener', 'build_opener', + 'pathname2url', 'url2pathname', 'getproxies', + # Legacy interface + 'urlretrieve', 'urlcleanup', 'URLopener', 'FancyURLopener', +] + +# used in User-Agent header sent +__version__ = sys.version[:3] + +_opener = None +def urlopen(url, data=None, 
timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **_3to2kwargs): + if 'cadefault' in _3to2kwargs: cadefault = _3to2kwargs['cadefault']; del _3to2kwargs['cadefault'] + else: cadefault = False + if 'capath' in _3to2kwargs: capath = _3to2kwargs['capath']; del _3to2kwargs['capath'] + else: capath = None + if 'cafile' in _3to2kwargs: cafile = _3to2kwargs['cafile']; del _3to2kwargs['cafile'] + else: cafile = None + global _opener + if cafile or capath or cadefault: + if not _have_ssl: + raise ValueError('SSL support not available') + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + context.options |= ssl.OP_NO_SSLv2 + context.verify_mode = ssl.CERT_REQUIRED + if cafile or capath: + context.load_verify_locations(cafile, capath) + else: + context.set_default_verify_paths() + https_handler = HTTPSHandler(context=context, check_hostname=True) + opener = build_opener(https_handler) + elif _opener is None: + _opener = opener = build_opener() + else: + opener = _opener + return opener.open(url, data, timeout) + +def install_opener(opener): + global _opener + _opener = opener + +_url_tempfiles = [] +def urlretrieve(url, filename=None, reporthook=None, data=None): + """ + Retrieve a URL into a temporary location on disk. + + Requires a URL argument. If a filename is passed, it is used as + the temporary file location. The reporthook argument should be + a callable that accepts a block number, a read size, and the + total file size of the URL target. The data argument should be + valid URL encoded data. + + If a filename is passed and the URL points to a local resource, + the result is a copy from local file to new file. + + Returns a tuple containing the path to the newly created + data file as well as the resulting HTTPMessage object. + """ + url_type, path = splittype(url) + + with contextlib.closing(urlopen(url, data)) as fp: + headers = fp.info() + + # Just return the local path and the "headers" for file:// + # URLs. No sense in performing a copy unless requested. 
+ if url_type == "file" and not filename: + return os.path.normpath(path), headers + + # Handle temporary file setup. + if filename: + tfp = open(filename, 'wb') + else: + tfp = tempfile.NamedTemporaryFile(delete=False) + filename = tfp.name + _url_tempfiles.append(filename) + + with tfp: + result = filename, headers + bs = 1024*8 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + + if reporthook: + reporthook(blocknum, bs, size) + + while True: + block = fp.read(bs) + if not block: + break + read += len(block) + tfp.write(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, bs, size) + + if size >= 0 and read < size: + raise ContentTooShortError( + "retrieval incomplete: got only %i out of %i bytes" + % (read, size), result) + + return result + +def urlcleanup(): + for temp_file in _url_tempfiles: + try: + os.unlink(temp_file) + except EnvironmentError: + pass + + del _url_tempfiles[:] + global _opener + if _opener: + _opener = None + +if PY3: + _cut_port_re = re.compile(r":\d+$", re.ASCII) +else: + _cut_port_re = re.compile(r":\d+$") + +def request_host(request): + + """Return request-host, as defined by RFC 2965. + + Variation from RFC: returned value is lowercased, for convenient + comparison. 
+ + """ + url = request.full_url + host = urlparse(url)[1] + if host == "": + host = request.get_header("Host", "") + + # remove port, if present + host = _cut_port_re.sub("", host, 1) + return host.lower() + +class Request(object): + + def __init__(self, url, data=None, headers={}, + origin_req_host=None, unverifiable=False, + method=None): + # unwrap('') --> 'type://host/path' + self.full_url = unwrap(url) + self.full_url, self.fragment = splittag(self.full_url) + self.data = data + self.headers = {} + self._tunnel_host = None + for key, value in headers.items(): + self.add_header(key, value) + self.unredirected_hdrs = {} + if origin_req_host is None: + origin_req_host = request_host(self) + self.origin_req_host = origin_req_host + self.unverifiable = unverifiable + self.method = method + self._parse() + + def _parse(self): + self.type, rest = splittype(self.full_url) + if self.type is None: + raise ValueError("unknown url type: %r" % self.full_url) + self.host, self.selector = splithost(rest) + if self.host: + self.host = unquote(self.host) + + def get_method(self): + """Return a string indicating the HTTP request method.""" + if self.method is not None: + return self.method + elif self.data is not None: + return "POST" + else: + return "GET" + + def get_full_url(self): + if self.fragment: + return '%s#%s' % (self.full_url, self.fragment) + else: + return self.full_url + + # Begin deprecated methods + + def add_data(self, data): + msg = "Request.add_data method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + self.data = data + + def has_data(self): + msg = "Request.has_data method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.data is not None + + def get_data(self): + msg = "Request.get_data method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.data + + def get_type(self): + msg = "Request.get_type method is deprecated." 
+ warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.type + + def get_host(self): + msg = "Request.get_host method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.host + + def get_selector(self): + msg = "Request.get_selector method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.selector + + def is_unverifiable(self): + msg = "Request.is_unverifiable method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.unverifiable + + def get_origin_req_host(self): + msg = "Request.get_origin_req_host method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.origin_req_host + + # End deprecated methods + + def set_proxy(self, host, type): + if self.type == 'https' and not self._tunnel_host: + self._tunnel_host = self.host + else: + self.type= type + self.selector = self.full_url + self.host = host + + def has_proxy(self): + return self.selector == self.full_url + + def add_header(self, key, val): + # useful for something like authentication + self.headers[key.capitalize()] = val + + def add_unredirected_header(self, key, val): + # will not be added to a redirected request + self.unredirected_hdrs[key.capitalize()] = val + + def has_header(self, header_name): + return (header_name in self.headers or + header_name in self.unredirected_hdrs) + + def get_header(self, header_name, default=None): + return self.headers.get( + header_name, + self.unredirected_hdrs.get(header_name, default)) + + def header_items(self): + hdrs = self.unredirected_hdrs.copy() + hdrs.update(self.headers) + return list(hdrs.items()) + +class OpenerDirector(object): + def __init__(self): + client_version = "Python-urllib/%s" % __version__ + self.addheaders = [('User-agent', client_version)] + # self.handlers is retained only for backward compatibility + self.handlers = [] + # manage the individual handlers + self.handle_open = {} + self.handle_error 
= {} + self.process_response = {} + self.process_request = {} + + def add_handler(self, handler): + if not hasattr(handler, "add_parent"): + raise TypeError("expected BaseHandler instance, got %r" % + type(handler)) + + added = False + for meth in dir(handler): + if meth in ["redirect_request", "do_open", "proxy_open"]: + # oops, coincidental match + continue + + i = meth.find("_") + protocol = meth[:i] + condition = meth[i+1:] + + if condition.startswith("error"): + j = condition.find("_") + i + 1 + kind = meth[j+1:] + try: + kind = int(kind) + except ValueError: + pass + lookup = self.handle_error.get(protocol, {}) + self.handle_error[protocol] = lookup + elif condition == "open": + kind = protocol + lookup = self.handle_open + elif condition == "response": + kind = protocol + lookup = self.process_response + elif condition == "request": + kind = protocol + lookup = self.process_request + else: + continue + + handlers = lookup.setdefault(kind, []) + if handlers: + bisect.insort(handlers, handler) + else: + handlers.append(handler) + added = True + + if added: + bisect.insort(self.handlers, handler) + handler.add_parent(self) + + def close(self): + # Only exists for backwards compatibility. + pass + + def _call_chain(self, chain, kind, meth_name, *args): + # Handlers raise an exception if no one else should try to handle + # the request, or return None if they can't but another handler + # could. Otherwise, they return the response. + handlers = chain.get(kind, ()) + for handler in handlers: + func = getattr(handler, meth_name) + result = func(*args) + if result is not None: + return result + + def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): + """ + Accept a URL or a Request object + + Python-Future: if the URL is passed as a byte-string, decode it first. 
+ """ + if isinstance(fullurl, bytes): + fullurl = fullurl.decode() + if isinstance(fullurl, str): + req = Request(fullurl, data) + else: + req = fullurl + if data is not None: + req.data = data + + req.timeout = timeout + protocol = req.type + + # pre-process request + meth_name = protocol+"_request" + for processor in self.process_request.get(protocol, []): + meth = getattr(processor, meth_name) + req = meth(req) + + response = self._open(req, data) + + # post-process response + meth_name = protocol+"_response" + for processor in self.process_response.get(protocol, []): + meth = getattr(processor, meth_name) + response = meth(req, response) + + return response + + def _open(self, req, data=None): + result = self._call_chain(self.handle_open, 'default', + 'default_open', req) + if result: + return result + + protocol = req.type + result = self._call_chain(self.handle_open, protocol, protocol + + '_open', req) + if result: + return result + + return self._call_chain(self.handle_open, 'unknown', + 'unknown_open', req) + + def error(self, proto, *args): + if proto in ('http', 'https'): + # XXX http[s] protocols are special-cased + dict = self.handle_error['http'] # https is not different than http + proto = args[2] # YUCK! + meth_name = 'http_error_%s' % proto + http_err = 1 + orig_args = args + else: + dict = self.handle_error + meth_name = proto + '_error' + http_err = 0 + args = (dict, proto, meth_name) + args + result = self._call_chain(*args) + if result: + return result + + if http_err: + args = (dict, 'default', 'http_error_default') + orig_args + return self._call_chain(*args) + +# XXX probably also want an abstract factory that knows when it makes +# sense to skip a superclass in favor of a subclass and when it might +# make sense to include both + +def build_opener(*handlers): + """Create an opener object from a list of handlers. + + The opener will use several default handlers, including support + for HTTP, FTP and when applicable HTTPS. 
+ + If any of the handlers passed as arguments are subclasses of the + default handlers, the default handlers will not be used. + """ + def isclass(obj): + return isinstance(obj, type) or hasattr(obj, "__bases__") + + opener = OpenerDirector() + default_classes = [ProxyHandler, UnknownHandler, HTTPHandler, + HTTPDefaultErrorHandler, HTTPRedirectHandler, + FTPHandler, FileHandler, HTTPErrorProcessor] + if hasattr(http_client, "HTTPSConnection"): + default_classes.append(HTTPSHandler) + skip = set() + for klass in default_classes: + for check in handlers: + if isclass(check): + if issubclass(check, klass): + skip.add(klass) + elif isinstance(check, klass): + skip.add(klass) + for klass in skip: + default_classes.remove(klass) + + for klass in default_classes: + opener.add_handler(klass()) + + for h in handlers: + if isclass(h): + h = h() + opener.add_handler(h) + return opener + +class BaseHandler(object): + handler_order = 500 + + def add_parent(self, parent): + self.parent = parent + + def close(self): + # Only exists for backwards compatibility + pass + + def __lt__(self, other): + if not hasattr(other, "handler_order"): + # Try to preserve the old behavior of having custom classes + # inserted after default ones (works only for custom user + # classes which are not aware of handler_order). + return True + return self.handler_order < other.handler_order + + +class HTTPErrorProcessor(BaseHandler): + """Process HTTP error responses.""" + handler_order = 1000 # after all other processing + + def http_response(self, request, response): + code, msg, hdrs = response.code, response.msg, response.info() + + # According to RFC 2616, "2xx" code indicates that the client's + # request was successfully received, understood, and accepted. 
+ if not (200 <= code < 300): + response = self.parent.error( + 'http', request, response, code, msg, hdrs) + + return response + + https_response = http_response + +class HTTPDefaultErrorHandler(BaseHandler): + def http_error_default(self, req, fp, code, msg, hdrs): + raise HTTPError(req.full_url, code, msg, hdrs, fp) + +class HTTPRedirectHandler(BaseHandler): + # maximum number of redirections to any single URL + # this is needed because of the state that cookies introduce + max_repeats = 4 + # maximum total number of redirections (regardless of URL) before + # assuming we're in a loop + max_redirections = 10 + + def redirect_request(self, req, fp, code, msg, headers, newurl): + """Return a Request or None in response to a redirect. + + This is called by the http_error_30x methods when a + redirection response is received. If a redirection should + take place, return a new Request to allow http_error_30x to + perform the redirect. Otherwise, raise HTTPError if no-one + else should try to handle this url. Return None if you can't + but another Handler might. + """ + m = req.get_method() + if (not (code in (301, 302, 303, 307) and m in ("GET", "HEAD") + or code in (301, 302, 303) and m == "POST")): + raise HTTPError(req.full_url, code, msg, headers, fp) + + # Strictly (according to RFC 2616), 301 or 302 in response to + # a POST MUST NOT cause a redirection without confirmation + # from the user (of urllib.request, in this case). In practice, + # essentially all clients do redirect in this case, so we do + # the same. 
+ # be conciliant with URIs containing a space + newurl = newurl.replace(' ', '%20') + CONTENT_HEADERS = ("content-length", "content-type") + newheaders = dict((k, v) for k, v in req.headers.items() + if k.lower() not in CONTENT_HEADERS) + return Request(newurl, + headers=newheaders, + origin_req_host=req.origin_req_host, + unverifiable=True) + + # Implementation note: To avoid the server sending us into an + # infinite loop, the request object needs to track what URLs we + # have already seen. Do this by adding a handler-specific + # attribute to the Request object. + def http_error_302(self, req, fp, code, msg, headers): + # Some servers (incorrectly) return multiple Location headers + # (so probably same goes for URI). Use first header. + if "location" in headers: + newurl = headers["location"] + elif "uri" in headers: + newurl = headers["uri"] + else: + return + + # fix a possible malformed URL + urlparts = urlparse(newurl) + + # For security reasons we don't allow redirection to anything other + # than http, https or ftp. + + if urlparts.scheme not in ('http', 'https', 'ftp', ''): + raise HTTPError( + newurl, code, + "%s - Redirection to url '%s' is not allowed" % (msg, newurl), + headers, fp) + + if not urlparts.path: + urlparts = list(urlparts) + urlparts[2] = "/" + newurl = urlunparse(urlparts) + + newurl = urljoin(req.full_url, newurl) + + # XXX Probably want to forget about the state of the current + # request, although that might interact poorly with other + # handlers that also use handler-specific request attributes + new = self.redirect_request(req, fp, code, msg, headers, newurl) + if new is None: + return + + # loop detection + # .redirect_dict has a key url if url was previously visited. 
+ if hasattr(req, 'redirect_dict'): + visited = new.redirect_dict = req.redirect_dict + if (visited.get(newurl, 0) >= self.max_repeats or + len(visited) >= self.max_redirections): + raise HTTPError(req.full_url, code, + self.inf_msg + msg, headers, fp) + else: + visited = new.redirect_dict = req.redirect_dict = {} + visited[newurl] = visited.get(newurl, 0) + 1 + + # Don't close the fp until we are sure that we won't use it + # with HTTPError. + fp.read() + fp.close() + + return self.parent.open(new, timeout=req.timeout) + + http_error_301 = http_error_303 = http_error_307 = http_error_302 + + inf_msg = "The HTTP server returned a redirect error that would " \ + "lead to an infinite loop.\n" \ + "The last 30x error message was:\n" + + +def _parse_proxy(proxy): + """Return (scheme, user, password, host/port) given a URL or an authority. + + If a URL is supplied, it must have an authority (host:port) component. + According to RFC 3986, having an authority component means the URL must + have two slashes after the scheme: + + >>> _parse_proxy('file:/ftp.example.com/') + Traceback (most recent call last): + ValueError: proxy URL with no authority: 'file:/ftp.example.com/' + + The first three items of the returned tuple may be None. 
+ + Examples of authority parsing: + + >>> _parse_proxy('proxy.example.com') + (None, None, None, 'proxy.example.com') + >>> _parse_proxy('proxy.example.com:3128') + (None, None, None, 'proxy.example.com:3128') + + The authority component may optionally include userinfo (assumed to be + username:password): + + >>> _parse_proxy('joe:password@proxy.example.com') + (None, 'joe', 'password', 'proxy.example.com') + >>> _parse_proxy('joe:password@proxy.example.com:3128') + (None, 'joe', 'password', 'proxy.example.com:3128') + + Same examples, but with URLs instead: + + >>> _parse_proxy('http://proxy.example.com/') + ('http', None, None, 'proxy.example.com') + >>> _parse_proxy('http://proxy.example.com:3128/') + ('http', None, None, 'proxy.example.com:3128') + >>> _parse_proxy('http://joe:password@proxy.example.com/') + ('http', 'joe', 'password', 'proxy.example.com') + >>> _parse_proxy('http://joe:password@proxy.example.com:3128') + ('http', 'joe', 'password', 'proxy.example.com:3128') + + Everything after the authority is ignored: + + >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128') + ('ftp', 'joe', 'password', 'proxy.example.com') + + Test for no trailing '/' case: + + >>> _parse_proxy('http://joe:password@proxy.example.com') + ('http', 'joe', 'password', 'proxy.example.com') + + """ + scheme, r_scheme = splittype(proxy) + if not r_scheme.startswith("/"): + # authority + scheme = None + authority = proxy + else: + # URL + if not r_scheme.startswith("//"): + raise ValueError("proxy URL with no authority: %r" % proxy) + # We have an authority, so for RFC 3986-compliant URLs (by ss 3. 
+ # and 3.3.), path is empty or starts with '/' + end = r_scheme.find("/", 2) + if end == -1: + end = None + authority = r_scheme[2:end] + userinfo, hostport = splituser(authority) + if userinfo is not None: + user, password = splitpasswd(userinfo) + else: + user = password = None + return scheme, user, password, hostport + +class ProxyHandler(BaseHandler): + # Proxies must be in front + handler_order = 100 + + def __init__(self, proxies=None): + if proxies is None: + proxies = getproxies() + assert hasattr(proxies, 'keys'), "proxies must be a mapping" + self.proxies = proxies + for type, url in proxies.items(): + setattr(self, '%s_open' % type, + lambda r, proxy=url, type=type, meth=self.proxy_open: + meth(r, proxy, type)) + + def proxy_open(self, req, proxy, type): + orig_type = req.type + proxy_type, user, password, hostport = _parse_proxy(proxy) + if proxy_type is None: + proxy_type = orig_type + + if req.host and proxy_bypass(req.host): + return None + + if user and password: + user_pass = '%s:%s' % (unquote(user), + unquote(password)) + creds = base64.b64encode(user_pass.encode()).decode("ascii") + req.add_header('Proxy-authorization', 'Basic ' + creds) + hostport = unquote(hostport) + req.set_proxy(hostport, proxy_type) + if orig_type == proxy_type or orig_type == 'https': + # let other handlers take care of it + return None + else: + # need to start over, because the other handlers don't + # grok the proxy's URL type + # e.g. 
if we have a constructor arg proxies like so: + # {'http': 'ftp://proxy.example.com'}, we may end up turning + # a request for http://acme.example.com/a into one for + # ftp://proxy.example.com/a + return self.parent.open(req, timeout=req.timeout) + +class HTTPPasswordMgr(object): + + def __init__(self): + self.passwd = {} + + def add_password(self, realm, uri, user, passwd): + # uri could be a single URI or a sequence + if isinstance(uri, str): + uri = [uri] + if realm not in self.passwd: + self.passwd[realm] = {} + for default_port in True, False: + reduced_uri = tuple( + [self.reduce_uri(u, default_port) for u in uri]) + self.passwd[realm][reduced_uri] = (user, passwd) + + def find_user_password(self, realm, authuri): + domains = self.passwd.get(realm, {}) + for default_port in True, False: + reduced_authuri = self.reduce_uri(authuri, default_port) + for uris, authinfo in domains.items(): + for uri in uris: + if self.is_suburi(uri, reduced_authuri): + return authinfo + return None, None + + def reduce_uri(self, uri, default_port=True): + """Accept authority or URI and extract only the authority and path.""" + # note HTTP URLs do not have a userinfo component + parts = urlsplit(uri) + if parts[1]: + # URI + scheme = parts[0] + authority = parts[1] + path = parts[2] or '/' + else: + # host or host:port + scheme = None + authority = uri + path = '/' + host, port = splitport(authority) + if default_port and port is None and scheme is not None: + dport = {"http": 80, + "https": 443, + }.get(scheme) + if dport is not None: + authority = "%s:%d" % (host, dport) + return authority, path + + def is_suburi(self, base, test): + """Check if test is below base in a URI tree + + Both args must be URIs in reduced form. 
+ """ + if base == test: + return True + if base[0] != test[0]: + return False + common = posixpath.commonprefix((base[1], test[1])) + if len(common) == len(base[1]): + return True + return False + + +class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): + + def find_user_password(self, realm, authuri): + user, password = HTTPPasswordMgr.find_user_password(self, realm, + authuri) + if user is not None: + return user, password + return HTTPPasswordMgr.find_user_password(self, None, authuri) + + +class AbstractBasicAuthHandler(object): + + # XXX this allows for multiple auth-schemes, but will stupidly pick + # the last one with a realm specified. + + # allow for double- and single-quoted realm values + # (single quotes are a violation of the RFC, but appear in the wild) + rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+' + 'realm=(["\']?)([^"\']*)\\2', re.I) + + # XXX could pre-emptively send auth info already accepted (RFC 2617, + # end of section 2, and section 1.2 immediately after "credentials" + # production). + + def __init__(self, password_mgr=None): + if password_mgr is None: + password_mgr = HTTPPasswordMgr() + self.passwd = password_mgr + self.add_password = self.passwd.add_password + self.retried = 0 + + def reset_retry_count(self): + self.retried = 0 + + def http_error_auth_reqed(self, authreq, host, req, headers): + # host may be an authority (without userinfo) or a URL with an + # authority + # XXX could be multiple headers + authreq = headers.get(authreq, None) + + if self.retried > 5: + # retry sending the username:password 5 times before failing. 
+ raise HTTPError(req.get_full_url(), 401, "basic auth failed", + headers, None) + else: + self.retried += 1 + + if authreq: + scheme = authreq.split()[0] + if scheme.lower() != 'basic': + raise ValueError("AbstractBasicAuthHandler does not" + " support the following scheme: '%s'" % + scheme) + else: + mo = AbstractBasicAuthHandler.rx.search(authreq) + if mo: + scheme, quote, realm = mo.groups() + if quote not in ['"',"'"]: + warnings.warn("Basic Auth Realm was unquoted", + UserWarning, 2) + if scheme.lower() == 'basic': + response = self.retry_http_basic_auth(host, req, realm) + if response and response.code != 401: + self.retried = 0 + return response + + def retry_http_basic_auth(self, host, req, realm): + user, pw = self.passwd.find_user_password(realm, host) + if pw is not None: + raw = "%s:%s" % (user, pw) + auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii") + if req.headers.get(self.auth_header, None) == auth: + return None + req.add_unredirected_header(self.auth_header, auth) + return self.parent.open(req, timeout=req.timeout) + else: + return None + + +class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): + + auth_header = 'Authorization' + + def http_error_401(self, req, fp, code, msg, headers): + url = req.full_url + response = self.http_error_auth_reqed('www-authenticate', + url, req, headers) + self.reset_retry_count() + return response + + +class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): + + auth_header = 'Proxy-authorization' + + def http_error_407(self, req, fp, code, msg, headers): + # http_error_auth_reqed requires that there is no userinfo component in + # authority. Assume there isn't one, since urllib.request does not (and + # should not, RFC 3986 s. 3.2.1) support requests for URLs containing + # userinfo. + authority = req.host + response = self.http_error_auth_reqed('proxy-authenticate', + authority, req, headers) + self.reset_retry_count() + return response + + +# Return n random bytes. 
+_randombytes = os.urandom + + +class AbstractDigestAuthHandler(object): + # Digest authentication is specified in RFC 2617. + + # XXX The client does not inspect the Authentication-Info header + # in a successful response. + + # XXX It should be possible to test this implementation against + # a mock server that just generates a static set of challenges. + + # XXX qop="auth-int" supports is shaky + + def __init__(self, passwd=None): + if passwd is None: + passwd = HTTPPasswordMgr() + self.passwd = passwd + self.add_password = self.passwd.add_password + self.retried = 0 + self.nonce_count = 0 + self.last_nonce = None + + def reset_retry_count(self): + self.retried = 0 + + def http_error_auth_reqed(self, auth_header, host, req, headers): + authreq = headers.get(auth_header, None) + if self.retried > 5: + # Don't fail endlessly - if we failed once, we'll probably + # fail a second time. Hm. Unless the Password Manager is + # prompting for the information. Crap. This isn't great + # but it's better than the current 'repeat until recursion + # depth exceeded' approach + raise HTTPError(req.full_url, 401, "digest auth failed", + headers, None) + else: + self.retried += 1 + if authreq: + scheme = authreq.split()[0] + if scheme.lower() == 'digest': + return self.retry_http_digest_auth(req, authreq) + elif scheme.lower() != 'basic': + raise ValueError("AbstractDigestAuthHandler does not support" + " the following scheme: '%s'" % scheme) + + def retry_http_digest_auth(self, req, auth): + token, challenge = auth.split(' ', 1) + chal = parse_keqv_list(filter(None, parse_http_list(challenge))) + auth = self.get_authorization(req, chal) + if auth: + auth_val = 'Digest %s' % auth + if req.headers.get(self.auth_header, None) == auth_val: + return None + req.add_unredirected_header(self.auth_header, auth_val) + resp = self.parent.open(req, timeout=req.timeout) + return resp + + def get_cnonce(self, nonce): + # The cnonce-value is an opaque + # quoted string value provided by the 
client and used by both client + # and server to avoid chosen plaintext attacks, to provide mutual + # authentication, and to provide some message integrity protection. + # This isn't a fabulous effort, but it's probably Good Enough. + s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime()) + b = s.encode("ascii") + _randombytes(8) + dig = hashlib.sha1(b).hexdigest() + return dig[:16] + + def get_authorization(self, req, chal): + try: + realm = chal['realm'] + nonce = chal['nonce'] + qop = chal.get('qop') + algorithm = chal.get('algorithm', 'MD5') + # mod_digest doesn't send an opaque, even though it isn't + # supposed to be optional + opaque = chal.get('opaque', None) + except KeyError: + return None + + H, KD = self.get_algorithm_impls(algorithm) + if H is None: + return None + + user, pw = self.passwd.find_user_password(realm, req.full_url) + if user is None: + return None + + # XXX not implemented yet + if req.data is not None: + entdig = self.get_entity_digest(req.data, chal) + else: + entdig = None + + A1 = "%s:%s:%s" % (user, realm, pw) + A2 = "%s:%s" % (req.get_method(), + # XXX selector: what about proxies and full urls + req.selector) + if qop == 'auth': + if nonce == self.last_nonce: + self.nonce_count += 1 + else: + self.nonce_count = 1 + self.last_nonce = nonce + ncvalue = '%08x' % self.nonce_count + cnonce = self.get_cnonce(nonce) + noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2)) + respdig = KD(H(A1), noncebit) + elif qop is None: + respdig = KD(H(A1), "%s:%s" % (nonce, H(A2))) + else: + # XXX handle auth-int. + raise URLError("qop '%s' is not supported." % qop) + + # XXX should the partial digests be encoded too? 
+ + base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ + 'response="%s"' % (user, realm, nonce, req.selector, + respdig) + if opaque: + base += ', opaque="%s"' % opaque + if entdig: + base += ', digest="%s"' % entdig + base += ', algorithm="%s"' % algorithm + if qop: + base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce) + return base + + def get_algorithm_impls(self, algorithm): + # lambdas assume digest modules are imported at the top level + if algorithm == 'MD5': + H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest() + elif algorithm == 'SHA': + H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest() + # XXX MD5-sess + KD = lambda s, d: H("%s:%s" % (s, d)) + return H, KD + + def get_entity_digest(self, data, chal): + # XXX not implemented yet + return None + + +class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): + """An authentication protocol defined by RFC 2069 + + Digest authentication improves on basic authentication because it + does not transmit passwords in the clear. 
+ """ + + auth_header = 'Authorization' + handler_order = 490 # before Basic auth + + def http_error_401(self, req, fp, code, msg, headers): + host = urlparse(req.full_url)[1] + retry = self.http_error_auth_reqed('www-authenticate', + host, req, headers) + self.reset_retry_count() + return retry + + +class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): + + auth_header = 'Proxy-Authorization' + handler_order = 490 # before Basic auth + + def http_error_407(self, req, fp, code, msg, headers): + host = req.host + retry = self.http_error_auth_reqed('proxy-authenticate', + host, req, headers) + self.reset_retry_count() + return retry + +class AbstractHTTPHandler(BaseHandler): + + def __init__(self, debuglevel=0): + self._debuglevel = debuglevel + + def set_http_debuglevel(self, level): + self._debuglevel = level + + def do_request_(self, request): + host = request.host + if not host: + raise URLError('no host given') + + if request.data is not None: # POST + data = request.data + if isinstance(data, str): + msg = "POST data should be bytes or an iterable of bytes. " \ + "It cannot be of type str." + raise TypeError(msg) + if not request.has_header('Content-type'): + request.add_unredirected_header( + 'Content-type', + 'application/x-www-form-urlencoded') + if not request.has_header('Content-length'): + size = None + try: + ### For Python-Future: + if PY2 and isinstance(data, array.array): + # memoryviews of arrays aren't supported + # in Py2.7. (e.g. memoryview(array.array('I', + # [1, 2, 3, 4])) raises a TypeError.) 
+ # So we calculate the size manually instead: + size = len(data) * data.itemsize + ### + else: + mv = memoryview(data) + size = len(mv) * mv.itemsize + except TypeError: + if isinstance(data, collections.Iterable): + raise ValueError("Content-Length should be specified " + "for iterable data of type %r %r" % (type(data), + data)) + else: + request.add_unredirected_header( + 'Content-length', '%d' % size) + + sel_host = host + if request.has_proxy(): + scheme, sel = splittype(request.selector) + sel_host, sel_path = splithost(sel) + if not request.has_header('Host'): + request.add_unredirected_header('Host', sel_host) + for name, value in self.parent.addheaders: + name = name.capitalize() + if not request.has_header(name): + request.add_unredirected_header(name, value) + + return request + + def do_open(self, http_class, req, **http_conn_args): + """Return an HTTPResponse object for the request, using http_class. + + http_class must implement the HTTPConnection API from http.client. + """ + host = req.host + if not host: + raise URLError('no host given') + + # will parse host:port + h = http_class(host, timeout=req.timeout, **http_conn_args) + + headers = dict(req.unredirected_hdrs) + headers.update(dict((k, v) for k, v in req.headers.items() + if k not in headers)) + + # TODO(jhylton): Should this be redesigned to handle + # persistent connections? + + # We want to make an HTTP/1.1 request, but the addinfourl + # class isn't prepared to deal with a persistent connection. + # It will try to read all remaining data from the socket, + # which will block while the server waits for the next request. + # So make sure the connection gets closed after the (only) + # request. 
+ headers["Connection"] = "close" + headers = dict((name.title(), val) for name, val in headers.items()) + + if req._tunnel_host: + tunnel_headers = {} + proxy_auth_hdr = "Proxy-Authorization" + if proxy_auth_hdr in headers: + tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] + # Proxy-Authorization should not be sent to origin + # server. + del headers[proxy_auth_hdr] + h.set_tunnel(req._tunnel_host, headers=tunnel_headers) + + try: + h.request(req.get_method(), req.selector, req.data, headers) + except socket.error as err: # timeout error + h.close() + raise URLError(err) + else: + r = h.getresponse() + # If the server does not send us a 'Connection: close' header, + # HTTPConnection assumes the socket should be left open. Manually + # mark the socket to be closed when this response object goes away. + if h.sock: + h.sock.close() + h.sock = None + + + r.url = req.get_full_url() + # This line replaces the .msg attribute of the HTTPResponse + # with .headers, because urllib clients expect the response to + # have the reason in .msg. It would be good to mark this + # attribute is deprecated and get then to use info() or + # .headers. 
+ r.msg = r.reason + return r + + +class HTTPHandler(AbstractHTTPHandler): + + def http_open(self, req): + return self.do_open(http_client.HTTPConnection, req) + + http_request = AbstractHTTPHandler.do_request_ + +if hasattr(http_client, 'HTTPSConnection'): + + class HTTPSHandler(AbstractHTTPHandler): + + def __init__(self, debuglevel=0, context=None, check_hostname=None): + AbstractHTTPHandler.__init__(self, debuglevel) + self._context = context + self._check_hostname = check_hostname + + def https_open(self, req): + return self.do_open(http_client.HTTPSConnection, req, + context=self._context, check_hostname=self._check_hostname) + + https_request = AbstractHTTPHandler.do_request_ + + __all__.append('HTTPSHandler') + +class HTTPCookieProcessor(BaseHandler): + def __init__(self, cookiejar=None): + import future.backports.http.cookiejar as http_cookiejar + if cookiejar is None: + cookiejar = http_cookiejar.CookieJar() + self.cookiejar = cookiejar + + def http_request(self, request): + self.cookiejar.add_cookie_header(request) + return request + + def http_response(self, request, response): + self.cookiejar.extract_cookies(response, request) + return response + + https_request = http_request + https_response = http_response + +class UnknownHandler(BaseHandler): + def unknown_open(self, req): + type = req.type + raise URLError('unknown url type: %s' % type) + +def parse_keqv_list(l): + """Parse list of key=value strings where keys are not duplicated.""" + parsed = {} + for elt in l: + k, v = elt.split('=', 1) + if v[0] == '"' and v[-1] == '"': + v = v[1:-1] + parsed[k] = v + return parsed + +def parse_http_list(s): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Neither commas nor quotes count if they are escaped. + Only double-quotes count, not single-quotes. 
+ """ + res = [] + part = '' + + escape = quote = False + for cur in s: + if escape: + part += cur + escape = False + continue + if quote: + if cur == '\\': + escape = True + continue + elif cur == '"': + quote = False + part += cur + continue + + if cur == ',': + res.append(part) + part = '' + continue + + if cur == '"': + quote = True + + part += cur + + # append last part + if part: + res.append(part) + + return [part.strip() for part in res] + +class FileHandler(BaseHandler): + # Use local file or FTP depending on form of URL + def file_open(self, req): + url = req.selector + if url[:2] == '//' and url[2:3] != '/' and (req.host and + req.host != 'localhost'): + if not req.host is self.get_names(): + raise URLError("file:// scheme is supported only on localhost") + else: + return self.open_local_file(req) + + # names for the localhost + names = None + def get_names(self): + if FileHandler.names is None: + try: + FileHandler.names = tuple( + socket.gethostbyname_ex('localhost')[2] + + socket.gethostbyname_ex(socket.gethostname())[2]) + except socket.gaierror: + FileHandler.names = (socket.gethostbyname('localhost'),) + return FileHandler.names + + # not entirely sure what the rules are here + def open_local_file(self, req): + import future.backports.email.utils as email_utils + import mimetypes + host = req.host + filename = req.selector + localfile = url2pathname(filename) + try: + stats = os.stat(localfile) + size = stats.st_size + modified = email_utils.formatdate(stats.st_mtime, usegmt=True) + mtype = mimetypes.guess_type(filename)[0] + headers = email.message_from_string( + 'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' % + (mtype or 'text/plain', size, modified)) + if host: + host, port = splitport(host) + if not host or \ + (not port and _safe_gethostbyname(host) in self.get_names()): + if host: + origurl = 'file://' + host + filename + else: + origurl = 'file://' + filename + return addinfourl(open(localfile, 'rb'), headers, origurl) + except 
OSError as exp: + # users shouldn't expect OSErrors coming from urlopen() + raise URLError(exp) + raise URLError('file not on local host') + +def _safe_gethostbyname(host): + try: + return socket.gethostbyname(host) + except socket.gaierror: + return None + +class FTPHandler(BaseHandler): + def ftp_open(self, req): + import ftplib + import mimetypes + host = req.host + if not host: + raise URLError('ftp error: no host given') + host, port = splitport(host) + if port is None: + port = ftplib.FTP_PORT + else: + port = int(port) + + # username/password handling + user, host = splituser(host) + if user: + user, passwd = splitpasswd(user) + else: + passwd = None + host = unquote(host) + user = user or '' + passwd = passwd or '' + + try: + host = socket.gethostbyname(host) + except socket.error as msg: + raise URLError(msg) + path, attrs = splitattr(req.selector) + dirs = path.split('/') + dirs = list(map(unquote, dirs)) + dirs, file = dirs[:-1], dirs[-1] + if dirs and not dirs[0]: + dirs = dirs[1:] + try: + fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout) + type = file and 'I' or 'D' + for attr in attrs: + attr, value = splitvalue(attr) + if attr.lower() == 'type' and \ + value in ('a', 'A', 'i', 'I', 'd', 'D'): + type = value.upper() + fp, retrlen = fw.retrfile(file, type) + headers = "" + mtype = mimetypes.guess_type(req.full_url)[0] + if mtype: + headers += "Content-type: %s\n" % mtype + if retrlen is not None and retrlen >= 0: + headers += "Content-length: %d\n" % retrlen + headers = email.message_from_string(headers) + return addinfourl(fp, headers, req.full_url) + except ftplib.all_errors as exp: + exc = URLError('ftp error: %r' % exp) + raise_with_traceback(exc) + + def connect_ftp(self, user, passwd, host, port, dirs, timeout): + return ftpwrapper(user, passwd, host, port, dirs, timeout, + persistent=False) + +class CacheFTPHandler(FTPHandler): + # XXX would be nice to have pluggable cache strategies + # XXX this stuff is definitely not thread 
safe + def __init__(self): + self.cache = {} + self.timeout = {} + self.soonest = 0 + self.delay = 60 + self.max_conns = 16 + + def setTimeout(self, t): + self.delay = t + + def setMaxConns(self, m): + self.max_conns = m + + def connect_ftp(self, user, passwd, host, port, dirs, timeout): + key = user, host, port, '/'.join(dirs), timeout + if key in self.cache: + self.timeout[key] = time.time() + self.delay + else: + self.cache[key] = ftpwrapper(user, passwd, host, port, + dirs, timeout) + self.timeout[key] = time.time() + self.delay + self.check_cache() + return self.cache[key] + + def check_cache(self): + # first check for old ones + t = time.time() + if self.soonest <= t: + for k, v in list(self.timeout.items()): + if v < t: + self.cache[k].close() + del self.cache[k] + del self.timeout[k] + self.soonest = min(list(self.timeout.values())) + + # then check the size + if len(self.cache) == self.max_conns: + for k, v in list(self.timeout.items()): + if v == self.soonest: + del self.cache[k] + del self.timeout[k] + break + self.soonest = min(list(self.timeout.values())) + + def clear_cache(self): + for conn in self.cache.values(): + conn.close() + self.cache.clear() + self.timeout.clear() + + +# Code move from the old urllib module + +MAXFTPCACHE = 10 # Trim the ftp cache beyond this size + +# Helper for non-unix systems +if os.name == 'nt': + from nturl2path import url2pathname, pathname2url +else: + def url2pathname(pathname): + """OS-specific conversion from a relative URL of the 'file' scheme + to a file system path; not recommended for general use.""" + return unquote(pathname) + + def pathname2url(pathname): + """OS-specific conversion from a file system path to a relative URL + of the 'file' scheme; not recommended for general use.""" + return quote(pathname) + +# This really consists of two pieces: +# (1) a class which handles opening of all sorts of URLs +# (plus assorted utilities etc.) 
+# (2) a set of functions for parsing URLs +# XXX Should these be separated out into different modules? + + +ftpcache = {} +class URLopener(object): + """Class to open URLs. + This is a class rather than just a subroutine because we may need + more than one set of global protocol-specific options. + Note -- this is a base class for those who don't want the + automatic handling of errors type 302 (relocated) and 401 + (authorization needed).""" + + __tempfiles = None + + version = "Python-urllib/%s" % __version__ + + # Constructor + def __init__(self, proxies=None, **x509): + msg = "%(class)s style of invoking requests is deprecated. " \ + "Use newer urlopen functions/methods" % {'class': self.__class__.__name__} + warnings.warn(msg, DeprecationWarning, stacklevel=3) + if proxies is None: + proxies = getproxies() + assert hasattr(proxies, 'keys'), "proxies must be a mapping" + self.proxies = proxies + self.key_file = x509.get('key_file') + self.cert_file = x509.get('cert_file') + self.addheaders = [('User-Agent', self.version)] + self.__tempfiles = [] + self.__unlink = os.unlink # See cleanup() + self.tempcache = None + # Undocumented feature: if you assign {} to tempcache, + # it is used to cache files retrieved with + # self.retrieve(). This is not enabled by default + # since it does not work for changing documents (and I + # haven't got the logic to check expiration headers + # yet). + self.ftpcache = ftpcache + # Undocumented feature: you can use a different + # ftp cache by assigning to the .ftpcache member; + # in case you want logically independent URL openers + # XXX This is not threadsafe. Bah. + + def __del__(self): + self.close() + + def close(self): + self.cleanup() + + def cleanup(self): + # This code sometimes runs when the rest of this module + # has already been deleted, so it can't use any globals + # or import anything. 
+ if self.__tempfiles: + for file in self.__tempfiles: + try: + self.__unlink(file) + except OSError: + pass + del self.__tempfiles[:] + if self.tempcache: + self.tempcache.clear() + + def addheader(self, *args): + """Add a header to be used by the HTTP interface only + e.g. u.addheader('Accept', 'sound/basic')""" + self.addheaders.append(args) + + # External interface + def open(self, fullurl, data=None): + """Use URLopener().open(file) instead of open(file, 'r').""" + fullurl = unwrap(to_bytes(fullurl)) + fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|") + if self.tempcache and fullurl in self.tempcache: + filename, headers = self.tempcache[fullurl] + fp = open(filename, 'rb') + return addinfourl(fp, headers, fullurl) + urltype, url = splittype(fullurl) + if not urltype: + urltype = 'file' + if urltype in self.proxies: + proxy = self.proxies[urltype] + urltype, proxyhost = splittype(proxy) + host, selector = splithost(proxyhost) + url = (host, fullurl) # Signal special case to open_*() + else: + proxy = None + name = 'open_' + urltype + self.type = urltype + name = name.replace('-', '_') + if not hasattr(self, name): + if proxy: + return self.open_unknown_proxy(proxy, fullurl, data) + else: + return self.open_unknown(fullurl, data) + try: + if data is None: + return getattr(self, name)(url) + else: + return getattr(self, name)(url, data) + except HTTPError: + raise + except socket.error as msg: + raise_with_traceback(IOError('socket error', msg)) + + def open_unknown(self, fullurl, data=None): + """Overridable interface to open unknown URL type.""" + type, url = splittype(fullurl) + raise IOError('url error', 'unknown url type', type) + + def open_unknown_proxy(self, proxy, fullurl, data=None): + """Overridable interface to open unknown URL type.""" + type, url = splittype(fullurl) + raise IOError('url error', 'invalid proxy for %s' % type, proxy) + + # External interface + def retrieve(self, url, filename=None, reporthook=None, data=None): + 
"""retrieve(url) returns (filename, headers) for a local object + or (tempfilename, headers) for a remote object.""" + url = unwrap(to_bytes(url)) + if self.tempcache and url in self.tempcache: + return self.tempcache[url] + type, url1 = splittype(url) + if filename is None and (not type or type == 'file'): + try: + fp = self.open_local_file(url1) + hdrs = fp.info() + fp.close() + return url2pathname(splithost(url1)[1]), hdrs + except IOError as msg: + pass + fp = self.open(url, data) + try: + headers = fp.info() + if filename: + tfp = open(filename, 'wb') + else: + import tempfile + garbage, path = splittype(url) + garbage, path = splithost(path or "") + path, garbage = splitquery(path or "") + path, garbage = splitattr(path or "") + suffix = os.path.splitext(path)[1] + (fd, filename) = tempfile.mkstemp(suffix) + self.__tempfiles.append(filename) + tfp = os.fdopen(fd, 'wb') + try: + result = filename, headers + if self.tempcache is not None: + self.tempcache[url] = result + bs = 1024*8 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + if reporthook: + reporthook(blocknum, bs, size) + while 1: + block = fp.read(bs) + if not block: + break + read += len(block) + tfp.write(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, bs, size) + finally: + tfp.close() + finally: + fp.close() + + # raise exception if actual size does not match content-length header + if size >= 0 and read < size: + raise ContentTooShortError( + "retrieval incomplete: got only %i out of %i bytes" + % (read, size), result) + + return result + + # Each method named open_ knows how to open that type of URL + + def _open_generic_http(self, connection_factory, url, data): + """Make an HTTP connection using connection_class. + + This is an internal method that should be called from + open_http() or open_https(). + + Arguments: + - connection_factory should take a host name and return an + HTTPConnection instance. 
+ - url is the url to retrieval or a host, relative-path pair. + - data is payload for a POST request or None. + """ + + user_passwd = None + proxy_passwd= None + if isinstance(url, str): + host, selector = splithost(url) + if host: + user_passwd, host = splituser(host) + host = unquote(host) + realhost = host + else: + host, selector = url + # check whether the proxy contains authorization information + proxy_passwd, host = splituser(host) + # now we proceed with the url we want to obtain + urltype, rest = splittype(selector) + url = rest + user_passwd = None + if urltype.lower() != 'http': + realhost = None + else: + realhost, rest = splithost(rest) + if realhost: + user_passwd, realhost = splituser(realhost) + if user_passwd: + selector = "%s://%s%s" % (urltype, realhost, rest) + if proxy_bypass(realhost): + host = realhost + + if not host: raise IOError('http error', 'no host given') + + if proxy_passwd: + proxy_passwd = unquote(proxy_passwd) + proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii') + else: + proxy_auth = None + + if user_passwd: + user_passwd = unquote(user_passwd) + auth = base64.b64encode(user_passwd.encode()).decode('ascii') + else: + auth = None + http_conn = connection_factory(host) + headers = {} + if proxy_auth: + headers["Proxy-Authorization"] = "Basic %s" % proxy_auth + if auth: + headers["Authorization"] = "Basic %s" % auth + if realhost: + headers["Host"] = realhost + + # Add Connection:close as we don't support persistent connections yet. 
+ # This helps in closing the socket and avoiding ResourceWarning + + headers["Connection"] = "close" + + for header, value in self.addheaders: + headers[header] = value + + if data is not None: + headers["Content-Type"] = "application/x-www-form-urlencoded" + http_conn.request("POST", selector, data, headers) + else: + http_conn.request("GET", selector, headers=headers) + + try: + response = http_conn.getresponse() + except http_client.BadStatusLine: + # something went wrong with the HTTP status line + raise URLError("http protocol error: bad status line") + + # According to RFC 2616, "2xx" code indicates that the client's + # request was successfully received, understood, and accepted. + if 200 <= response.status < 300: + return addinfourl(response, response.msg, "http:" + url, + response.status) + else: + return self.http_error( + url, response.fp, + response.status, response.reason, response.msg, data) + + def open_http(self, url, data=None): + """Use HTTP protocol.""" + return self._open_generic_http(http_client.HTTPConnection, url, data) + + def http_error(self, url, fp, errcode, errmsg, headers, data=None): + """Handle http errors. 
+ + Derived class can override this, or provide specific handlers + named http_error_DDD where DDD is the 3-digit error code.""" + # First check if there's a specific handler for this error + name = 'http_error_%d' % errcode + if hasattr(self, name): + method = getattr(self, name) + if data is None: + result = method(url, fp, errcode, errmsg, headers) + else: + result = method(url, fp, errcode, errmsg, headers, data) + if result: return result + return self.http_error_default(url, fp, errcode, errmsg, headers) + + def http_error_default(self, url, fp, errcode, errmsg, headers): + """Default error handler: close the connection and raise IOError.""" + fp.close() + raise HTTPError(url, errcode, errmsg, headers, None) + + if _have_ssl: + def _https_connection(self, host): + return http_client.HTTPSConnection(host, + key_file=self.key_file, + cert_file=self.cert_file) + + def open_https(self, url, data=None): + """Use HTTPS protocol.""" + return self._open_generic_http(self._https_connection, url, data) + + def open_file(self, url): + """Use local file or FTP depending on form of URL.""" + if not isinstance(url, str): + raise URLError('file error: proxy support for file protocol currently not implemented') + if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/': + raise ValueError("file:// scheme is supported only on localhost") + else: + return self.open_local_file(url) + + def open_local_file(self, url): + """Use local file.""" + import future.backports.email.utils as email_utils + import mimetypes + host, file = splithost(url) + localname = url2pathname(file) + try: + stats = os.stat(localname) + except OSError as e: + raise URLError(e.strerror, e.filename) + size = stats.st_size + modified = email_utils.formatdate(stats.st_mtime, usegmt=True) + mtype = mimetypes.guess_type(url)[0] + headers = email.message_from_string( + 'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' % + (mtype or 'text/plain', size, modified)) + if not host: + 
urlfile = file + if file[:1] == '/': + urlfile = 'file://' + file + return addinfourl(open(localname, 'rb'), headers, urlfile) + host, port = splitport(host) + if (not port + and socket.gethostbyname(host) in ((localhost(),) + thishost())): + urlfile = file + if file[:1] == '/': + urlfile = 'file://' + file + elif file[:2] == './': + raise ValueError("local file url may start with / or file:. Unknown url of type: %s" % url) + return addinfourl(open(localname, 'rb'), headers, urlfile) + raise URLError('local file error: not on local host') + + def open_ftp(self, url): + """Use FTP protocol.""" + if not isinstance(url, str): + raise URLError('ftp error: proxy support for ftp protocol currently not implemented') + import mimetypes + host, path = splithost(url) + if not host: raise URLError('ftp error: no host given') + host, port = splitport(host) + user, host = splituser(host) + if user: user, passwd = splitpasswd(user) + else: passwd = None + host = unquote(host) + user = unquote(user or '') + passwd = unquote(passwd or '') + host = socket.gethostbyname(host) + if not port: + import ftplib + port = ftplib.FTP_PORT + else: + port = int(port) + path, attrs = splitattr(path) + path = unquote(path) + dirs = path.split('/') + dirs, file = dirs[:-1], dirs[-1] + if dirs and not dirs[0]: dirs = dirs[1:] + if dirs and not dirs[0]: dirs[0] = '/' + key = user, host, port, '/'.join(dirs) + # XXX thread unsafe! 
+ if len(self.ftpcache) > MAXFTPCACHE: + # Prune the cache, rather arbitrarily + for k in self.ftpcache.keys(): + if k != key: + v = self.ftpcache[k] + del self.ftpcache[k] + v.close() + try: + if key not in self.ftpcache: + self.ftpcache[key] = \ + ftpwrapper(user, passwd, host, port, dirs) + if not file: type = 'D' + else: type = 'I' + for attr in attrs: + attr, value = splitvalue(attr) + if attr.lower() == 'type' and \ + value in ('a', 'A', 'i', 'I', 'd', 'D'): + type = value.upper() + (fp, retrlen) = self.ftpcache[key].retrfile(file, type) + mtype = mimetypes.guess_type("ftp:" + url)[0] + headers = "" + if mtype: + headers += "Content-Type: %s\n" % mtype + if retrlen is not None and retrlen >= 0: + headers += "Content-Length: %d\n" % retrlen + headers = email.message_from_string(headers) + return addinfourl(fp, headers, "ftp:" + url) + except ftperrors() as exp: + raise_with_traceback(URLError('ftp error %r' % exp)) + + def open_data(self, url, data=None): + """Use "data" URL.""" + if not isinstance(url, str): + raise URLError('data error: proxy support for data protocol currently not implemented') + # ignore POSTed data + # + # syntax of data URLs: + # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data + # mediatype := [ type "/" subtype ] *( ";" parameter ) + # data := *urlchar + # parameter := attribute "=" value + try: + [type, data] = url.split(',', 1) + except ValueError: + raise IOError('data error', 'bad data URL') + if not type: + type = 'text/plain;charset=US-ASCII' + semi = type.rfind(';') + if semi >= 0 and '=' not in type[semi:]: + encoding = type[semi+1:] + type = type[:semi] + else: + encoding = '' + msg = [] + msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT', + time.gmtime(time.time()))) + msg.append('Content-type: %s' % type) + if encoding == 'base64': + # XXX is this encoding/decoding ok? 
+ data = base64.decodebytes(data.encode('ascii')).decode('latin-1') + else: + data = unquote(data) + msg.append('Content-Length: %d' % len(data)) + msg.append('') + msg.append(data) + msg = '\n'.join(msg) + headers = email.message_from_string(msg) + f = io.StringIO(msg) + #f.fileno = None # needed for addinfourl + return addinfourl(f, headers, url) + + +class FancyURLopener(URLopener): + """Derived class with handlers for errors we can handle (perhaps).""" + + def __init__(self, *args, **kwargs): + URLopener.__init__(self, *args, **kwargs) + self.auth_cache = {} + self.tries = 0 + self.maxtries = 10 + + def http_error_default(self, url, fp, errcode, errmsg, headers): + """Default error handling -- don't raise an exception.""" + return addinfourl(fp, headers, "http:" + url, errcode) + + def http_error_302(self, url, fp, errcode, errmsg, headers, data=None): + """Error 302 -- relocated (temporarily).""" + self.tries += 1 + if self.maxtries and self.tries >= self.maxtries: + if hasattr(self, "http_error_500"): + meth = self.http_error_500 + else: + meth = self.http_error_default + self.tries = 0 + return meth(url, fp, 500, + "Internal Server Error: Redirect Recursion", headers) + result = self.redirect_internal(url, fp, errcode, errmsg, headers, + data) + self.tries = 0 + return result + + def redirect_internal(self, url, fp, errcode, errmsg, headers, data): + if 'location' in headers: + newurl = headers['location'] + elif 'uri' in headers: + newurl = headers['uri'] + else: + return + fp.close() + + # In case the server sent a relative URL, join with original: + newurl = urljoin(self.type + ":" + url, newurl) + + urlparts = urlparse(newurl) + + # For security reasons, we don't allow redirection to anything other + # than http, https and ftp. 
+ + # We are using newer HTTPError with older redirect_internal method + # This older method will get deprecated in 3.3 + + if urlparts.scheme not in ('http', 'https', 'ftp', ''): + raise HTTPError(newurl, errcode, + errmsg + + " Redirection to url '%s' is not allowed." % newurl, + headers, fp) + + return self.open(newurl) + + def http_error_301(self, url, fp, errcode, errmsg, headers, data=None): + """Error 301 -- also relocated (permanently).""" + return self.http_error_302(url, fp, errcode, errmsg, headers, data) + + def http_error_303(self, url, fp, errcode, errmsg, headers, data=None): + """Error 303 -- also relocated (essentially identical to 302).""" + return self.http_error_302(url, fp, errcode, errmsg, headers, data) + + def http_error_307(self, url, fp, errcode, errmsg, headers, data=None): + """Error 307 -- relocated, but turn POST into error.""" + if data is None: + return self.http_error_302(url, fp, errcode, errmsg, headers, data) + else: + return self.http_error_default(url, fp, errcode, errmsg, headers) + + def http_error_401(self, url, fp, errcode, errmsg, headers, data=None, + retry=False): + """Error 401 -- authentication required. 
+ This function supports Basic authentication only.""" + if 'www-authenticate' not in headers: + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + stuff = headers['www-authenticate'] + match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff) + if not match: + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + scheme, realm = match.groups() + if scheme.lower() != 'basic': + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + if not retry: + URLopener.http_error_default(self, url, fp, errcode, errmsg, + headers) + name = 'retry_' + self.type + '_basic_auth' + if data is None: + return getattr(self,name)(url, realm) + else: + return getattr(self,name)(url, realm, data) + + def http_error_407(self, url, fp, errcode, errmsg, headers, data=None, + retry=False): + """Error 407 -- proxy authentication required. + This function supports Basic authentication only.""" + if 'proxy-authenticate' not in headers: + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + stuff = headers['proxy-authenticate'] + match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff) + if not match: + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + scheme, realm = match.groups() + if scheme.lower() != 'basic': + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + if not retry: + URLopener.http_error_default(self, url, fp, errcode, errmsg, + headers) + name = 'retry_proxy_' + self.type + '_basic_auth' + if data is None: + return getattr(self,name)(url, realm) + else: + return getattr(self,name)(url, realm, data) + + def retry_proxy_http_basic_auth(self, url, realm, data=None): + host, selector = splithost(url) + newurl = 'http://' + host + selector + proxy = self.proxies['http'] + urltype, proxyhost = splittype(proxy) + proxyhost, proxyselector = splithost(proxyhost) + i = proxyhost.find('@') + 1 + proxyhost = proxyhost[i:] + user, passwd = 
self.get_user_passwd(proxyhost, realm, i) + if not (user or passwd): return None + proxyhost = "%s:%s@%s" % (quote(user, safe=''), + quote(passwd, safe=''), proxyhost) + self.proxies['http'] = 'http://' + proxyhost + proxyselector + if data is None: + return self.open(newurl) + else: + return self.open(newurl, data) + + def retry_proxy_https_basic_auth(self, url, realm, data=None): + host, selector = splithost(url) + newurl = 'https://' + host + selector + proxy = self.proxies['https'] + urltype, proxyhost = splittype(proxy) + proxyhost, proxyselector = splithost(proxyhost) + i = proxyhost.find('@') + 1 + proxyhost = proxyhost[i:] + user, passwd = self.get_user_passwd(proxyhost, realm, i) + if not (user or passwd): return None + proxyhost = "%s:%s@%s" % (quote(user, safe=''), + quote(passwd, safe=''), proxyhost) + self.proxies['https'] = 'https://' + proxyhost + proxyselector + if data is None: + return self.open(newurl) + else: + return self.open(newurl, data) + + def retry_http_basic_auth(self, url, realm, data=None): + host, selector = splithost(url) + i = host.find('@') + 1 + host = host[i:] + user, passwd = self.get_user_passwd(host, realm, i) + if not (user or passwd): return None + host = "%s:%s@%s" % (quote(user, safe=''), + quote(passwd, safe=''), host) + newurl = 'http://' + host + selector + if data is None: + return self.open(newurl) + else: + return self.open(newurl, data) + + def retry_https_basic_auth(self, url, realm, data=None): + host, selector = splithost(url) + i = host.find('@') + 1 + host = host[i:] + user, passwd = self.get_user_passwd(host, realm, i) + if not (user or passwd): return None + host = "%s:%s@%s" % (quote(user, safe=''), + quote(passwd, safe=''), host) + newurl = 'https://' + host + selector + if data is None: + return self.open(newurl) + else: + return self.open(newurl, data) + + def get_user_passwd(self, host, realm, clear_cache=0): + key = realm + '@' + host.lower() + if key in self.auth_cache: + if clear_cache: + del 
self.auth_cache[key] + else: + return self.auth_cache[key] + user, passwd = self.prompt_user_passwd(host, realm) + if user or passwd: self.auth_cache[key] = (user, passwd) + return user, passwd + + def prompt_user_passwd(self, host, realm): + """Override this in a GUI environment!""" + import getpass + try: + user = input("Enter username for %s at %s: " % (realm, host)) + passwd = getpass.getpass("Enter password for %s in %s at %s: " % + (user, realm, host)) + return user, passwd + except KeyboardInterrupt: + print() + return None, None + + +# Utility functions + +_localhost = None +def localhost(): + """Return the IP address of the magic hostname 'localhost'.""" + global _localhost + if _localhost is None: + _localhost = socket.gethostbyname('localhost') + return _localhost + +_thishost = None +def thishost(): + """Return the IP addresses of the current host.""" + global _thishost + if _thishost is None: + try: + _thishost = tuple(socket.gethostbyname_ex(socket.gethostname())[2]) + except socket.gaierror: + _thishost = tuple(socket.gethostbyname_ex('localhost')[2]) + return _thishost + +_ftperrors = None +def ftperrors(): + """Return the set of errors raised by the FTP class.""" + global _ftperrors + if _ftperrors is None: + import ftplib + _ftperrors = ftplib.all_errors + return _ftperrors + +_noheaders = None +def noheaders(): + """Return an empty email Message object.""" + global _noheaders + if _noheaders is None: + _noheaders = email.message_from_string("") + return _noheaders + + +# Utility classes + +class ftpwrapper(object): + """Class used by open_ftp() for cache of open FTP connections.""" + + def __init__(self, user, passwd, host, port, dirs, timeout=None, + persistent=True): + self.user = user + self.passwd = passwd + self.host = host + self.port = port + self.dirs = dirs + self.timeout = timeout + self.refcount = 0 + self.keepalive = persistent + self.init() + + def init(self): + import ftplib + self.busy = 0 + self.ftp = ftplib.FTP() + 
self.ftp.connect(self.host, self.port, self.timeout) + self.ftp.login(self.user, self.passwd) + _target = '/'.join(self.dirs) + self.ftp.cwd(_target) + + def retrfile(self, file, type): + import ftplib + self.endtransfer() + if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1 + else: cmd = 'TYPE ' + type; isdir = 0 + try: + self.ftp.voidcmd(cmd) + except ftplib.all_errors: + self.init() + self.ftp.voidcmd(cmd) + conn = None + if file and not isdir: + # Try to retrieve as a file + try: + cmd = 'RETR ' + file + conn, retrlen = self.ftp.ntransfercmd(cmd) + except ftplib.error_perm as reason: + if str(reason)[:3] != '550': + raise_with_traceback(URLError('ftp error: %r' % reason)) + if not conn: + # Set transfer mode to ASCII! + self.ftp.voidcmd('TYPE A') + # Try a directory listing. Verify that directory exists. + if file: + pwd = self.ftp.pwd() + try: + try: + self.ftp.cwd(file) + except ftplib.error_perm as reason: + ### Was: + # raise URLError('ftp error: %r' % reason) from reason + exc = URLError('ftp error: %r' % reason) + exc.__cause__ = reason + raise exc + finally: + self.ftp.cwd(pwd) + cmd = 'LIST ' + file + else: + cmd = 'LIST' + conn, retrlen = self.ftp.ntransfercmd(cmd) + self.busy = 1 + + ftpobj = addclosehook(conn.makefile('rb'), self.file_close) + self.refcount += 1 + conn.close() + # Pass back both a suitably decorated object and a retrieval length + return (ftpobj, retrlen) + + def endtransfer(self): + self.busy = 0 + + def close(self): + self.keepalive = False + if self.refcount <= 0: + self.real_close() + + def file_close(self): + self.endtransfer() + self.refcount -= 1 + if self.refcount <= 0 and not self.keepalive: + self.real_close() + + def real_close(self): + self.endtransfer() + try: + self.ftp.close() + except ftperrors(): + pass + +# Proxy handling +def getproxies_environment(): + """Return a dictionary of scheme -> proxy server URL mappings. + + Scan the environment for variables named _proxy; + this seems to be the standard convention. 
If you need a + different way, you can pass a proxies dictionary to the + [Fancy]URLopener constructor. + + """ + proxies = {} + for name, value in os.environ.items(): + name = name.lower() + if value and name[-6:] == '_proxy': + proxies[name[:-6]] = value + return proxies + +def proxy_bypass_environment(host): + """Test if proxies should not be used for a particular host. + + Checks the environment for a variable named no_proxy, which should + be a list of DNS suffixes separated by commas, or '*' for all hosts. + """ + no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '') + # '*' is special case for always bypass + if no_proxy == '*': + return 1 + # strip port off host + hostonly, port = splitport(host) + # check if the host ends with any of the DNS suffixes + no_proxy_list = [proxy.strip() for proxy in no_proxy.split(',')] + for name in no_proxy_list: + if name and (hostonly.endswith(name) or host.endswith(name)): + return 1 + # otherwise, don't bypass + return 0 + + +# This code tests an OSX specific data structure but is testable on all +# platforms +def _proxy_bypass_macosx_sysconf(host, proxy_settings): + """ + Return True iff this host shouldn't be accessed using a proxy + + This function uses the MacOSX framework SystemConfiguration + to fetch the proxy information. + + proxy_settings come from _scproxy._get_proxy_settings or get mocked ie: + { 'exclude_simple': bool, + 'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.1', '10.0/16'] + } + """ + from fnmatch import fnmatch + + hostonly, port = splitport(host) + + def ip2num(ipAddr): + parts = ipAddr.split('.') + parts = list(map(int, parts)) + if len(parts) != 4: + parts = (parts + [0, 0, 0, 0])[:4] + return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3] + + # Check for simple host names: + if '.' 
not in host: + if proxy_settings['exclude_simple']: + return True + + hostIP = None + + for value in proxy_settings.get('exceptions', ()): + # Items in the list are strings like these: *.local, 169.254/16 + if not value: continue + + m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value) + if m is not None: + if hostIP is None: + try: + hostIP = socket.gethostbyname(hostonly) + hostIP = ip2num(hostIP) + except socket.error: + continue + + base = ip2num(m.group(1)) + mask = m.group(2) + if mask is None: + mask = 8 * (m.group(1).count('.') + 1) + else: + mask = int(mask[1:]) + mask = 32 - mask + + if (hostIP >> mask) == (base >> mask): + return True + + elif fnmatch(host, value): + return True + + return False + + +if sys.platform == 'darwin': + from _scproxy import _get_proxy_settings, _get_proxies + + def proxy_bypass_macosx_sysconf(host): + proxy_settings = _get_proxy_settings() + return _proxy_bypass_macosx_sysconf(host, proxy_settings) + + def getproxies_macosx_sysconf(): + """Return a dictionary of scheme -> proxy server URL mappings. + + This function uses the MacOSX framework SystemConfiguration + to fetch the proxy information. + """ + return _get_proxies() + + + + def proxy_bypass(host): + if getproxies_environment(): + return proxy_bypass_environment(host) + else: + return proxy_bypass_macosx_sysconf(host) + + def getproxies(): + return getproxies_environment() or getproxies_macosx_sysconf() + + +elif os.name == 'nt': + def getproxies_registry(): + """Return a dictionary of scheme -> proxy server URL mappings. + + Win32 uses the registry to store proxies. + + """ + proxies = {} + try: + import winreg + except ImportError: + # Std module, so should be around - but you never know! 
+ return proxies + try: + internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, + r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') + proxyEnable = winreg.QueryValueEx(internetSettings, + 'ProxyEnable')[0] + if proxyEnable: + # Returned as Unicode but problems if not converted to ASCII + proxyServer = str(winreg.QueryValueEx(internetSettings, + 'ProxyServer')[0]) + if '=' in proxyServer: + # Per-protocol settings + for p in proxyServer.split(';'): + protocol, address = p.split('=', 1) + # See if address has a type:// prefix + if not re.match('^([^/:]+)://', address): + address = '%s://%s' % (protocol, address) + proxies[protocol] = address + else: + # Use one setting for all protocols + if proxyServer[:5] == 'http:': + proxies['http'] = proxyServer + else: + proxies['http'] = 'http://%s' % proxyServer + proxies['https'] = 'https://%s' % proxyServer + proxies['ftp'] = 'ftp://%s' % proxyServer + internetSettings.Close() + except (WindowsError, ValueError, TypeError): + # Either registry key not found etc, or the value in an + # unexpected format. + # proxies already set up to be empty so nothing to do + pass + return proxies + + def getproxies(): + """Return a dictionary of scheme -> proxy server URL mappings. + + Returns settings gathered from the environment, if specified, + or the registry. + + """ + return getproxies_environment() or getproxies_registry() + + def proxy_bypass_registry(host): + try: + import winreg + except ImportError: + # Std modules, so should be around - but you never know! 
+ return 0 + try: + internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, + r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') + proxyEnable = winreg.QueryValueEx(internetSettings, + 'ProxyEnable')[0] + proxyOverride = str(winreg.QueryValueEx(internetSettings, + 'ProxyOverride')[0]) + # ^^^^ Returned as Unicode but problems if not converted to ASCII + except WindowsError: + return 0 + if not proxyEnable or not proxyOverride: + return 0 + # try to make a host list from name and IP address. + rawHost, port = splitport(host) + host = [rawHost] + try: + addr = socket.gethostbyname(rawHost) + if addr != rawHost: + host.append(addr) + except socket.error: + pass + try: + fqdn = socket.getfqdn(rawHost) + if fqdn != rawHost: + host.append(fqdn) + except socket.error: + pass + # make a check value list from the registry entry: replace the + # '' string by the localhost entry and the corresponding + # canonical entry. + proxyOverride = proxyOverride.split(';') + # now check if we match one of the registry values. + for test in proxyOverride: + if test == '': + if '.' not in rawHost: + return 1 + test = test.replace(".", r"\.") # mask dots + test = test.replace("*", r".*") # change glob sequence + test = test.replace("?", r".") # change glob char + for val in host: + if re.match(test, val, re.I): + return 1 + return 0 + + def proxy_bypass(host): + """Return a dictionary of scheme -> proxy server URL mappings. + + Returns settings gathered from the environment, if specified, + or the registry. 
+ + """ + if getproxies_environment(): + return proxy_bypass_environment(host) + else: + return proxy_bypass_registry(host) + +else: + # By default use environment variables + getproxies = getproxies_environment + proxy_bypass = proxy_bypass_environment diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/response.py b/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/response.py new file mode 100644 index 000000000..adbf6e5ae --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/response.py @@ -0,0 +1,103 @@ +"""Response classes used by urllib. + +The base class, addbase, defines a minimal file-like interface, +including read() and readline(). The typical response object is an +addinfourl instance, which defines an info() method that returns +headers and a geturl() method that returns the url. +""" +from __future__ import absolute_import, division, unicode_literals +from future.builtins import object + +class addbase(object): + """Base class for addinfo and addclosehook.""" + + # XXX Add a method to expose the timeout on the underlying socket? + + def __init__(self, fp): + # TODO(jhylton): Is there a better way to delegate using io? + self.fp = fp + self.read = self.fp.read + self.readline = self.fp.readline + # TODO(jhylton): Make sure an object with readlines() is also iterable + if hasattr(self.fp, "readlines"): + self.readlines = self.fp.readlines + if hasattr(self.fp, "fileno"): + self.fileno = self.fp.fileno + else: + self.fileno = lambda: None + + def __iter__(self): + # Assigning `__iter__` to the instance doesn't work as intended + # because the iter builtin does something like `cls.__iter__(obj)` + # and thus fails to find the _bound_ method `obj.__iter__`. + # Returning just `self.fp` works for built-in file objects but + # might not work for general file-like objects. 
+ return iter(self.fp) + + def __repr__(self): + return '<%s at %r whose fp = %r>' % (self.__class__.__name__, + id(self), self.fp) + + def close(self): + if self.fp: + self.fp.close() + self.fp = None + self.read = None + self.readline = None + self.readlines = None + self.fileno = None + self.__iter__ = None + self.__next__ = None + + def __enter__(self): + if self.fp is None: + raise ValueError("I/O operation on closed file") + return self + + def __exit__(self, type, value, traceback): + self.close() + +class addclosehook(addbase): + """Class to add a close hook to an open file.""" + + def __init__(self, fp, closehook, *hookargs): + addbase.__init__(self, fp) + self.closehook = closehook + self.hookargs = hookargs + + def close(self): + if self.closehook: + self.closehook(*self.hookargs) + self.closehook = None + self.hookargs = None + addbase.close(self) + +class addinfo(addbase): + """class to add an info() method to an open file.""" + + def __init__(self, fp, headers): + addbase.__init__(self, fp) + self.headers = headers + + def info(self): + return self.headers + +class addinfourl(addbase): + """class to add info() and geturl() methods to an open file.""" + + def __init__(self, fp, headers, url, code=None): + addbase.__init__(self, fp) + self.headers = headers + self.url = url + self.code = code + + def info(self): + return self.headers + + def getcode(self): + return self.code + + def geturl(self): + return self.url + +del absolute_import, division, unicode_literals, object diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/robotparser.py b/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/robotparser.py new file mode 100644 index 000000000..a0f36511b --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/urllib/robotparser.py @@ -0,0 +1,211 @@ +from __future__ import absolute_import, division, unicode_literals +from future.builtins import str +""" robotparser.py + + 
 Copyright (C) 2000 Bastian Kleineidam

 You can choose between two licenses when using this package:
 1) GNU GPLv2
 2) PSF license for Python 2.2

 The robots.txt Exclusion Protocol is implemented as specified in
 http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html
"""

# Was: import urllib.parse, urllib.request
from future.backports import urllib
from future.backports.urllib import parse as _parse, request as _request
# Expose the backported submodules under stdlib-style attribute names so
# the code below can use urllib.parse / urllib.request spellings.
urllib.parse = _parse
urllib.request = _request


__all__ = ["RobotFileParser"]

class RobotFileParser(object):
    """ This class provides a set of methods to read, parse and answer
    questions about a single robots.txt file.

    """

    def __init__(self, url=''):
        self.entries = []            # list of Entry objects (non-wildcard agents)
        self.default_entry = None    # Entry whose useragents include "*"
        self.disallow_all = False    # set when robots.txt fetch returns 401/403
        self.allow_all = False       # set when robots.txt fetch returns other 4xx/5xx
        self.set_url(url)
        self.last_checked = 0        # epoch seconds of last fetch; 0 = never

    def mtime(self):
        """Returns the time the robots.txt file was last fetched.

        This is useful for long-running web spiders that need to
        check for new robots.txt files periodically.

        """
        return self.last_checked

    def modified(self):
        """Sets the time the robots.txt file was last fetched to the
        current time.

        """
        import time
        self.last_checked = time.time()

    def set_url(self, url):
        """Sets the URL referring to a robots.txt file."""
        self.url = url
        # urlparse()[1:3] -> (netloc, path)
        self.host, self.path = urllib.parse.urlparse(url)[1:3]

    def read(self):
        """Reads the robots.txt URL and feeds it to the parser."""
        # NOTE(review): urllib.error is never imported/assigned above, so an
        # HTTPError here would likely raise AttributeError on the except
        # clause itself — verify against the future.backports.urllib package.
        try:
            f = urllib.request.urlopen(self.url)
        except urllib.error.HTTPError as err:
            if err.code in (401, 403):
                # Forbidden/unauthorized robots.txt: be conservative.
                self.disallow_all = True
            elif err.code >= 400:
                # Any other client/server error: treat as absent robots.txt.
                self.allow_all = True
        else:
            raw = f.read()
            self.parse(raw.decode("utf-8").splitlines())

    def _add_entry(self, entry):
        # Route an entry either to the wildcard slot or the ordered list.
        if "*" in entry.useragents:
            # the default entry is considered last
            if self.default_entry is None:
                # the first default entry wins
                self.default_entry = entry
        else:
            self.entries.append(entry)

    def parse(self, lines):
        """Parse the input lines from a robots.txt file.

        We allow that a user-agent: line is not preceded by
        one or more blank lines.
        """
        # states:
        #   0: start state
        #   1: saw user-agent line
        #   2: saw an allow or disallow line
        state = 0
        entry = Entry()

        for line in lines:
            if not line:
                # A blank line terminates the current record.
                if state == 1:
                    # user-agent line(s) with no rules: discard.
                    entry = Entry()
                    state = 0
                elif state == 2:
                    self._add_entry(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i >= 0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = urllib.parse.unquote(line[1].strip())
                if line[0] == "user-agent":
                    if state == 2:
                        # New record starts immediately after rules.
                        self._add_entry(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], False))
                        state = 2
                elif line[0] == "allow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], True))
                        state = 2
        if state == 2:
            # Flush the trailing record (no blank line at EOF).
            self._add_entry(entry)


    def can_fetch(self, useragent, url):
        """using the parsed robots.txt decide if useragent can fetch url"""
        if self.disallow_all:
            return False
        if self.allow_all:
            return True
        # search for given user agent matches
        # the first match counts
        # Normalize: strip scheme/netloc, re-quote the path portion.
        parsed_url = urllib.parse.urlparse(urllib.parse.unquote(url))
        url = urllib.parse.urlunparse(('', '', parsed_url.path,
            parsed_url.params, parsed_url.query, parsed_url.fragment))
        url = urllib.parse.quote(url)
        if not url:
            url = "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # try the default entry last
        if self.default_entry:
            return self.default_entry.allowance(url)
        # agent not found ==> access granted
        return True

    def __str__(self):
        return ''.join([str(entry) + "\n" for entry in self.entries])


class RuleLine(object):
    """A rule line is a single "Allow:" (allowance==True) or "Disallow:"
    (allowance==False) followed by a path."""
    def __init__(self, path, allowance):
        if path == '' and not allowance:
            # an empty value means allow all
            allowance = True
        self.path = urllib.parse.quote(path)
        self.allowance = allowance

    def applies_to(self, filename):
        # "*" matches everything; otherwise prefix match on the path.
        return self.path == "*" or filename.startswith(self.path)

    def __str__(self):
        return (self.allowance and "Allow" or "Disallow") + ": " + self.path


class Entry(object):
    """An entry has one or more user-agents and zero or more rulelines"""
    def __init__(self):
        self.useragents = []   # agent name tokens this record applies to
        self.rulelines = []    # ordered RuleLine objects; first match wins

    def __str__(self):
        ret = []
        for agent in self.useragents:
            ret.extend(["User-agent: ", agent, "\n"])
        for line in self.rulelines:
            ret.extend([str(line), "\n"])
        return ''.join(ret)

    def applies_to(self, useragent):
        """check if this entry applies to the specified agent"""
        # split the name token and make it lower case
        useragent = useragent.split("/")[0].lower()
        for agent in self.useragents:
            if agent == '*':
                # we have the catch-all agent
                return True
            agent = agent.lower()
            # Substring match: "bot" matches user agent "Superbot".
            if agent in useragent:
                return True
        return False

    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is URL decoded"""
        for line in self.rulelines:
            if line.applies_to(filename):
                return line.allowance
        # No rule matched: allowed by default.
        return True
+# 1999-02-10 fl Fixed problem with empty responses (from Skip Montanaro) +# 1999-06-20 fl Speed improvements, pluggable parsers/transports (0.9.8) +# 2000-11-28 fl Changed boolean to check the truth value of its argument +# 2001-02-24 fl Added encoding/Unicode/SafeTransport patches +# 2001-02-26 fl Added compare support to wrappers (0.9.9/1.0b1) +# 2001-03-28 fl Make sure response tuple is a singleton +# 2001-03-29 fl Don't require empty params element (from Nicholas Riley) +# 2001-06-10 fl Folded in _xmlrpclib accelerator support (1.0b2) +# 2001-08-20 fl Base xmlrpclib.Error on built-in Exception (from Paul Prescod) +# 2001-09-03 fl Allow Transport subclass to override getparser +# 2001-09-10 fl Lazy import of urllib, cgi, xmllib (20x import speedup) +# 2001-10-01 fl Remove containers from memo cache when done with them +# 2001-10-01 fl Use faster escape method (80% dumps speedup) +# 2001-10-02 fl More dumps microtuning +# 2001-10-04 fl Make sure import expat gets a parser (from Guido van Rossum) +# 2001-10-10 sm Allow long ints to be passed as ints if they don't overflow +# 2001-10-17 sm Test for int and long overflow (allows use on 64-bit systems) +# 2001-11-12 fl Use repr() to marshal doubles (from Paul Felix) +# 2002-03-17 fl Avoid buffered read when possible (from James Rucker) +# 2002-04-07 fl Added pythondoc comments +# 2002-04-16 fl Added __str__ methods to datetime/binary wrappers +# 2002-05-15 fl Added error constants (from Andrew Kuchling) +# 2002-06-27 fl Merged with Python CVS version +# 2002-10-22 fl Added basic authentication (based on code from Phillip Eby) +# 2003-01-22 sm Add support for the bool type +# 2003-02-27 gvr Remove apply calls +# 2003-04-24 sm Use cStringIO if available +# 2003-04-25 ak Add support for nil +# 2003-06-15 gn Add support for time.struct_time +# 2003-07-12 gp Correct marshalling of Faults +# 2003-10-31 mvl Add multicall support +# 2004-08-20 mvl Bump minimum supported Python version to 2.1 +# +# Copyright (c) 1999-2002 by 
Secret Labs AB. +# Copyright (c) 1999-2002 by Fredrik Lundh. +# +# info@pythonware.com +# http://www.pythonware.com +# +# -------------------------------------------------------------------- +# The XML-RPC client interface is +# +# Copyright (c) 1999-2002 by Secret Labs AB +# Copyright (c) 1999-2002 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its +# associated documentation, you agree that you have read, understood, +# and will comply with the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and +# its associated documentation for any purpose and without fee is +# hereby granted, provided that the above copyright notice appears in +# all copies, and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Secret Labs AB or the author not be used in advertising or publicity +# pertaining to distribution of the software without specific, written +# prior permission. +# +# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD +# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- +# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR +# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY +# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +# OF THIS SOFTWARE. +# -------------------------------------------------------------------- + +""" +Ported using Python-Future from the Python 3.3 standard library. + +An XML-RPC client interface for Python. + +The marshalling and response parser code can also be used to +implement XML-RPC servers. 
+ +Exported exceptions: + + Error Base class for client errors + ProtocolError Indicates an HTTP protocol error + ResponseError Indicates a broken response package + Fault Indicates an XML-RPC fault package + +Exported classes: + + ServerProxy Represents a logical connection to an XML-RPC server + + MultiCall Executor of boxcared xmlrpc requests + DateTime dateTime wrapper for an ISO 8601 string or time tuple or + localtime integer value to generate a "dateTime.iso8601" + XML-RPC value + Binary binary data wrapper + + Marshaller Generate an XML-RPC params chunk from a Python data structure + Unmarshaller Unmarshal an XML-RPC response from incoming XML event message + Transport Handles an HTTP transaction to an XML-RPC server + SafeTransport Handles an HTTPS transaction to an XML-RPC server + +Exported constants: + + (none) + +Exported functions: + + getparser Create instance of the fastest available parser & attach + to an unmarshalling object + dumps Convert an argument tuple or a Fault instance to an XML-RPC + request (or response, if the methodresponse option is used). + loads Convert an XML-RPC packet to unmarshalled data plus a method + name (None if not present). 
+""" + +from __future__ import (absolute_import, division, print_function, + unicode_literals) +from future.builtins import bytes, dict, int, range, str + +import base64 +# Py2.7 compatibility hack +base64.encodebytes = base64.encodestring +base64.decodebytes = base64.decodestring +import sys +import time +from datetime import datetime +from future.backports.http import client as http_client +from future.backports.urllib import parse as urllib_parse +from future.utils import ensure_new_type +from xml.parsers import expat +import socket +import errno +from io import BytesIO +try: + import gzip +except ImportError: + gzip = None #python can be built without zlib/gzip support + +# -------------------------------------------------------------------- +# Internal stuff + +def escape(s): + s = s.replace("&", "&") + s = s.replace("<", "<") + return s.replace(">", ">",) + +# used in User-Agent header sent +__version__ = sys.version[:3] + +# xmlrpc integer limits +MAXINT = 2**31-1 +MININT = -2**31 + +# -------------------------------------------------------------------- +# Error constants (from Dan Libby's specification at +# http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php) + +# Ranges of errors +PARSE_ERROR = -32700 +SERVER_ERROR = -32600 +APPLICATION_ERROR = -32500 +SYSTEM_ERROR = -32400 +TRANSPORT_ERROR = -32300 + +# Specific errors +NOT_WELLFORMED_ERROR = -32700 +UNSUPPORTED_ENCODING = -32701 +INVALID_ENCODING_CHAR = -32702 +INVALID_XMLRPC = -32600 +METHOD_NOT_FOUND = -32601 +INVALID_METHOD_PARAMS = -32602 +INTERNAL_ERROR = -32603 + +# -------------------------------------------------------------------- +# Exceptions + +## +# Base class for all kinds of client-side errors. + +class Error(Exception): + """Base class for client errors.""" + def __str__(self): + return repr(self) + +## +# Indicates an HTTP-level protocol error. This is raised by the HTTP +# transport layer, if the server returns an error code other than 200 +# (OK). 
+# +# @param url The target URL. +# @param errcode The HTTP error code. +# @param errmsg The HTTP error message. +# @param headers The HTTP header dictionary. + +class ProtocolError(Error): + """Indicates an HTTP protocol error.""" + def __init__(self, url, errcode, errmsg, headers): + Error.__init__(self) + self.url = url + self.errcode = errcode + self.errmsg = errmsg + self.headers = headers + def __repr__(self): + return ( + "" % + (self.url, self.errcode, self.errmsg) + ) + +## +# Indicates a broken XML-RPC response package. This exception is +# raised by the unmarshalling layer, if the XML-RPC response is +# malformed. + +class ResponseError(Error): + """Indicates a broken response package.""" + pass + +## +# Indicates an XML-RPC fault response package. This exception is +# raised by the unmarshalling layer, if the XML-RPC response contains +# a fault string. This exception can also be used as a class, to +# generate a fault XML-RPC message. +# +# @param faultCode The XML-RPC fault code. +# @param faultString The XML-RPC fault string. + +class Fault(Error): + """Indicates an XML-RPC fault package.""" + def __init__(self, faultCode, faultString, **extra): + Error.__init__(self) + self.faultCode = faultCode + self.faultString = faultString + def __repr__(self): + return "" % (ensure_new_type(self.faultCode), + ensure_new_type(self.faultString)) + +# -------------------------------------------------------------------- +# Special values + +## +# Backwards compatibility + +boolean = Boolean = bool + +## +# Wrapper for XML-RPC DateTime values. This converts a time value to +# the format used by XML-RPC. +#

+# The value can be given as a datetime object, as a string in the +# format "yyyymmddThh:mm:ss", as a 9-item time tuple (as returned by +# time.localtime()), or an integer value (as returned by time.time()). +# The wrapper uses time.localtime() to convert an integer to a time +# tuple. +# +# @param value The time, given as a datetime object, an ISO 8601 string, +# a time tuple, or an integer time value. + + +### For Python-Future: +def _iso8601_format(value): + return "%04d%02d%02dT%02d:%02d:%02d" % ( + value.year, value.month, value.day, + value.hour, value.minute, value.second) +### +# Issue #13305: different format codes across platforms +# _day0 = datetime(1, 1, 1) +# if _day0.strftime('%Y') == '0001': # Mac OS X +# def _iso8601_format(value): +# return value.strftime("%Y%m%dT%H:%M:%S") +# elif _day0.strftime('%4Y') == '0001': # Linux +# def _iso8601_format(value): +# return value.strftime("%4Y%m%dT%H:%M:%S") +# else: +# def _iso8601_format(value): +# return value.strftime("%Y%m%dT%H:%M:%S").zfill(17) +# del _day0 + + +def _strftime(value): + if isinstance(value, datetime): + return _iso8601_format(value) + + if not isinstance(value, (tuple, time.struct_time)): + if value == 0: + value = time.time() + value = time.localtime(value) + + return "%04d%02d%02dT%02d:%02d:%02d" % value[:6] + +class DateTime(object): + """DateTime wrapper for an ISO 8601 string or time tuple or + localtime integer value to generate 'dateTime.iso8601' XML-RPC + value. 
+ """ + + def __init__(self, value=0): + if isinstance(value, str): + self.value = value + else: + self.value = _strftime(value) + + def make_comparable(self, other): + if isinstance(other, DateTime): + s = self.value + o = other.value + elif isinstance(other, datetime): + s = self.value + o = _iso8601_format(other) + elif isinstance(other, str): + s = self.value + o = other + elif hasattr(other, "timetuple"): + s = self.timetuple() + o = other.timetuple() + else: + otype = (hasattr(other, "__class__") + and other.__class__.__name__ + or type(other)) + raise TypeError("Can't compare %s and %s" % + (self.__class__.__name__, otype)) + return s, o + + def __lt__(self, other): + s, o = self.make_comparable(other) + return s < o + + def __le__(self, other): + s, o = self.make_comparable(other) + return s <= o + + def __gt__(self, other): + s, o = self.make_comparable(other) + return s > o + + def __ge__(self, other): + s, o = self.make_comparable(other) + return s >= o + + def __eq__(self, other): + s, o = self.make_comparable(other) + return s == o + + def __ne__(self, other): + s, o = self.make_comparable(other) + return s != o + + def timetuple(self): + return time.strptime(self.value, "%Y%m%dT%H:%M:%S") + + ## + # Get date/time value. + # + # @return Date/time value, as an ISO 8601 string. + + def __str__(self): + return self.value + + def __repr__(self): + return "" % (ensure_new_type(self.value), id(self)) + + def decode(self, data): + self.value = str(data).strip() + + def encode(self, out): + out.write("") + out.write(self.value) + out.write("\n") + +def _datetime(data): + # decode xml element contents into a DateTime structure. + value = DateTime() + value.decode(data) + return value + +def _datetime_type(data): + return datetime.strptime(data, "%Y%m%dT%H:%M:%S") + +## +# Wrapper for binary data. This can be used to transport any kind +# of binary data over XML-RPC, using BASE64 encoding. +# +# @param data An 8-bit string containing arbitrary data. 
+ +class Binary(object): + """Wrapper for binary data.""" + + def __init__(self, data=None): + if data is None: + data = b"" + else: + if not isinstance(data, (bytes, bytearray)): + raise TypeError("expected bytes or bytearray, not %s" % + data.__class__.__name__) + data = bytes(data) # Make a copy of the bytes! + self.data = data + + ## + # Get buffer contents. + # + # @return Buffer contents, as an 8-bit string. + + def __str__(self): + return str(self.data, "latin-1") # XXX encoding?! + + def __eq__(self, other): + if isinstance(other, Binary): + other = other.data + return self.data == other + + def __ne__(self, other): + if isinstance(other, Binary): + other = other.data + return self.data != other + + def decode(self, data): + self.data = base64.decodebytes(data) + + def encode(self, out): + out.write("\n") + encoded = base64.encodebytes(self.data) + out.write(encoded.decode('ascii')) + out.write("\n") + +def _binary(data): + # decode xml element contents into a Binary structure + value = Binary() + value.decode(data) + return value + +WRAPPERS = (DateTime, Binary) + +# -------------------------------------------------------------------- +# XML parsers + +class ExpatParser(object): + # fast expat parser for Python 2.0 and later. + def __init__(self, target): + self._parser = parser = expat.ParserCreate(None, None) + self._target = target + parser.StartElementHandler = target.start + parser.EndElementHandler = target.end + parser.CharacterDataHandler = target.data + encoding = None + target.xml(encoding, None) + + def feed(self, data): + self._parser.Parse(data, 0) + + def close(self): + self._parser.Parse("", 1) # end of data + del self._target, self._parser # get rid of circular references + +# -------------------------------------------------------------------- +# XML-RPC marshalling and unmarshalling code + +## +# XML-RPC marshaller. +# +# @param encoding Default encoding for 8-bit strings. The default +# value is None (interpreted as UTF-8). 
# @see dumps

class Marshaller(object):
    """Generate an XML-RPC params chunk from a Python data structure.

    Create a Marshaller instance for each set of parameters, and use
    the "dumps" method to convert your data (represented as a tuple)
    to an XML-RPC params chunk. To write a fault response, pass a
    Fault instance instead. You may prefer to use the "dumps" module
    function for this purpose.

    All XML element tag literals below were restored per the upstream
    xmlrpc.client implementation (they had been stripped in this copy).
    """

    # by the way, if you don't understand what's going on in here,
    # that's perfectly ok.

    def __init__(self, encoding=None, allow_none=False):
        self.memo = {}       # ids of containers being marshalled (cycle guard)
        self.data = None
        self.encoding = encoding
        self.allow_none = allow_none

    dispatch = {}

    def dumps(self, values):
        out = []
        write = out.append
        dump = self.__dump
        if isinstance(values, Fault):
            # fault instance
            write("<fault>\n")
            dump({'faultCode': values.faultCode,
                  'faultString': values.faultString},
                 write)
            write("</fault>\n")
        else:
            # parameter block
            # FIXME: the xml-rpc specification allows us to leave out
            # the entire <params> block if there are no parameters.
            # however, changing this may break older code (including
            # old versions of xmlrpclib.py), so this is better left as
            # is for now. See @XMLRPC3 for more information. /F
            write("<params>\n")
            for v in values:
                write("<param>\n")
                dump(v, write)
                write("</param>\n")
            write("</params>\n")
        result = "".join(out)
        return str(result)

    def __dump(self, value, write):
        try:
            f = self.dispatch[type(ensure_new_type(value))]
        except KeyError:
            # check if this object can be marshalled as a structure
            if not hasattr(value, '__dict__'):
                raise TypeError("cannot marshal %s objects" % type(value))
            # check if this class is a sub-class of a basic type,
            # because we don't know how to marshal these types
            # (e.g. a string sub-class)
            for type_ in type(value).__mro__:
                if type_ in self.dispatch.keys():
                    raise TypeError("cannot marshal %s objects" % type(value))
            # XXX(twouters): using "_arbitrary_instance" as key as a quick-fix
            # for the p3yk merge, this should probably be fixed more neatly.
            f = self.dispatch["_arbitrary_instance"]
        f(self, value, write)

    def dump_nil(self, value, write):
        if not self.allow_none:
            raise TypeError("cannot marshal None unless allow_none is enabled")
        write("<value><nil/></value>")
    dispatch[type(None)] = dump_nil

    def dump_bool(self, value, write):
        write("<value><boolean>")
        write(value and "1" or "0")
        write("</boolean></value>\n")
    dispatch[bool] = dump_bool

    def dump_long(self, value, write):
        if value > MAXINT or value < MININT:
            raise OverflowError("long int exceeds XML-RPC limits")
        write("<value><int>")
        write(str(int(value)))
        write("</int></value>\n")
    dispatch[int] = dump_long

    # backward compatible
    dump_int = dump_long

    def dump_double(self, value, write):
        write("<value><double>")
        write(repr(ensure_new_type(value)))
        write("</double></value>\n")
    dispatch[float] = dump_double

    def dump_unicode(self, value, write, escape=escape):
        write("<value><string>")
        write(escape(value))
        write("</string></value>\n")
    dispatch[str] = dump_unicode

    def dump_bytes(self, value, write):
        write("<value><base64>\n")
        encoded = base64.encodebytes(value)
        write(encoded.decode('ascii'))
        write("</base64></value>\n")
    dispatch[bytes] = dump_bytes
    dispatch[bytearray] = dump_bytes

    def dump_array(self, value, write):
        i = id(value)
        if i in self.memo:
            raise TypeError("cannot marshal recursive sequences")
        self.memo[i] = None
        dump = self.__dump
        write("<value><array><data>\n")
        for v in value:
            dump(v, write)
        write("</data></array></value>\n")
        del self.memo[i]
    dispatch[tuple] = dump_array
    dispatch[list] = dump_array

    def dump_struct(self, value, write, escape=escape):
        i = id(value)
        if i in self.memo:
            raise TypeError("cannot marshal recursive dictionaries")
        self.memo[i] = None
        dump = self.__dump
        write("<value><struct>\n")
        for k, v in value.items():
            write("<member>\n")
            if not isinstance(k, str):
                raise TypeError("dictionary key must be string")
            write("<name>%s</name>\n" % escape(k))
            dump(v, write)
            write("</member>\n")
        write("</struct></value>\n")
        del self.memo[i]
    dispatch[dict] = dump_struct

    def dump_datetime(self, value, write):
        write("<value><dateTime.iso8601>")
        write(_strftime(value))
        write("</dateTime.iso8601></value>\n")
    dispatch[datetime] = dump_datetime

    def dump_instance(self, value, write):
        # check for special wrappers
        if value.__class__ in WRAPPERS:
            self.write = write
            value.encode(self)
            del self.write
        else:
            # store instance attributes as a struct (really?)
            self.dump_struct(value.__dict__, write)
    dispatch[DateTime] = dump_instance
    dispatch[Binary] = dump_instance
    # XXX(twouters): using "_arbitrary_instance" as key as a quick-fix
    # for the p3yk merge, this should probably be fixed more neatly.
    dispatch["_arbitrary_instance"] = dump_instance

##
# XML-RPC unmarshaller.
#
# @see loads

class Unmarshaller(object):
    """Unmarshal an XML-RPC response, based on incoming XML event
    messages (start, data, end). Call close() to get the resulting
    data structure.

    Note that this reader is fairly tolerant, and gladly accepts bogus
    XML-RPC data without complaining (but not bogus XML).
    """

    # and again, if you don't understand what's going on in here,
    # that's perfectly ok.

    def __init__(self, use_datetime=False, use_builtin_types=False):
        self._type = None            # "params" | "fault" | "methodName"
        self._stack = []             # decoded values, innermost last
        self._marks = []             # stack offsets of open array/struct
        self._data = []              # character data for current element
        self._methodname = None
        self._encoding = "utf-8"
        self.append = self._stack.append
        self._use_datetime = use_builtin_types or use_datetime
        self._use_bytes = use_builtin_types

    def close(self):
        # return response tuple and target method
        if self._type is None or self._marks:
            raise ResponseError()
        if self._type == "fault":
            raise Fault(**self._stack[0])
        return tuple(self._stack)

    def getmethodname(self):
        return self._methodname

    #
    # event handlers

    def xml(self, encoding, standalone):
        self._encoding = encoding
        # FIXME: assert standalone == 1 ???

    def start(self, tag, attrs):
        # prepare to handle this element
        if tag == "array" or tag == "struct":
            self._marks.append(len(self._stack))
        self._data = []
        self._value = (tag == "value")

    def data(self, text):
        self._data.append(text)

    def end(self, tag):
        # call the appropriate end tag handler
        try:
            f = self.dispatch[tag]
        except KeyError:
            pass  # unknown tag ?
        else:
            return f(self, "".join(self._data))

    #
    # accelerator support

    def end_dispatch(self, tag, data):
        # dispatch data
        try:
            f = self.dispatch[tag]
        except KeyError:
            pass  # unknown tag ?
        else:
            return f(self, data)

    #
    # element decoders

    dispatch = {}

    def end_nil(self, data):
        self.append(None)
        self._value = 0
    dispatch["nil"] = end_nil

    def end_boolean(self, data):
        if data == "0":
            self.append(False)
        elif data == "1":
            self.append(True)
        else:
            raise TypeError("bad boolean value")
        self._value = 0
    dispatch["boolean"] = end_boolean

    def end_int(self, data):
        self.append(int(data))
        self._value = 0
    dispatch["i4"] = end_int
    dispatch["i8"] = end_int
    dispatch["int"] = end_int

    def end_double(self, data):
        self.append(float(data))
        self._value = 0
    dispatch["double"] = end_double

    def end_string(self, data):
        # NOTE(review): data.decode() assumes Py2-style byte strings from the
        # parser; under Py3 expat delivers str — verify against the
        # future.builtins str semantics this module imports.
        if self._encoding:
            data = data.decode(self._encoding)
        self.append(data)
        self._value = 0
    dispatch["string"] = end_string
    dispatch["name"] = end_string  # struct keys are always strings

    def end_array(self, data):
        mark = self._marks.pop()
        # map arrays to Python lists
        self._stack[mark:] = [self._stack[mark:]]
        self._value = 0
    dispatch["array"] = end_array

    def end_struct(self, data):
        mark = self._marks.pop()
        # map structs to Python dictionaries
        dict = {}
        items = self._stack[mark:]
        for i in range(0, len(items), 2):
            dict[items[i]] = items[i+1]
        self._stack[mark:] = [dict]
        self._value = 0
    dispatch["struct"] = end_struct

    def end_base64(self, data):
        value = Binary()
        value.decode(data.encode("ascii"))
        if self._use_bytes:
            value = value.data
        self.append(value)
        self._value = 0
    dispatch["base64"] = end_base64

    def end_dateTime(self, data):
        value = DateTime()
        value.decode(data)
        if self._use_datetime:
            value = _datetime_type(data)
        self.append(value)
    dispatch["dateTime.iso8601"] = end_dateTime

    def end_value(self, data):
        # if we stumble upon a value element with no internal
        # elements, treat it as a string element
        if self._value:
            self.end_string(data)
    dispatch["value"] = end_value

    def end_params(self, data):
        self._type = "params"
    dispatch["params"] = end_params

    def end_fault(self, data):
        self._type = "fault"
    dispatch["fault"] = end_fault

    def end_methodName(self, data):
        if self._encoding:
            data = data.decode(self._encoding)
        self._methodname = data
        self._type = "methodName"  # no params
    dispatch["methodName"] = end_methodName

## Multicall support
#

class _MultiCallMethod(object):
    # some lesser magic to store calls made to a MultiCall object
    # for batch execution
    def __init__(self, call_list, name):
        self.__call_list = call_list
        self.__name = name
    def __getattr__(self, name):
        return _MultiCallMethod(self.__call_list, "%s.%s" % (self.__name, name))
    def __call__(self, *args):
        self.__call_list.append((self.__name, args))

class MultiCallIterator(object):
    """Iterates over the results of a multicall. Exceptions are
    raised in response to xmlrpc faults."""

    def __init__(self, results):
        self.results = results

    def __getitem__(self, i):
        item = self.results[i]
        # BUGFIX: was `isinstance(type(item), dict)`, which is always False
        # (a type object is never a dict instance), so fault dicts fell
        # through to the ValueError below instead of raising Fault.
        if isinstance(item, dict):
            raise Fault(item['faultCode'], item['faultString'])
        elif type(item) == type([]):
            return item[0]
        else:
            raise ValueError("unexpected type in multicall result")

class MultiCall(object):
    """server -> a object used to boxcar method calls

    server should be a ServerProxy object.

    Methods can be added to the MultiCall using normal
    method call syntax e.g.:

    multicall = MultiCall(server_proxy)
    multicall.add(2,3)
    multicall.get_address("Guido")

    To execute the multicall, call the MultiCall object e.g.:

    add_result, address = multicall()
    """

    def __init__(self, server):
        self.__server = server
        self.__call_list = []

    def __repr__(self):
        # Template restored (angle-bracket text was stripped in this copy).
        return "<MultiCall at %x>" % id(self)

    __str__ = __repr__

    def __getattr__(self, name):
        return _MultiCallMethod(self.__call_list, name)

    def __call__(self):
        marshalled_list = []
        for name, args in self.__call_list:
            marshalled_list.append({'methodName': name, 'params': args})

        return MultiCallIterator(self.__server.system.multicall(marshalled_list))

# --------------------------------------------------------------------
# convenience functions

FastMarshaller = FastParser = FastUnmarshaller = None

##
# Create a parser object, and connect it to an unmarshalling instance.
# This function picks the fastest available XML parser.
#
# return A (parser, unmarshaller) tuple.

def getparser(use_datetime=False, use_builtin_types=False):
    """getparser() -> parser, unmarshaller

    Create an instance of the fastest available parser, and attach it
    to an unmarshalling object. Return both objects.
    """
    if FastParser and FastUnmarshaller:
        if use_builtin_types:
            mkdatetime = _datetime_type
            mkbytes = base64.decodebytes
        elif use_datetime:
            mkdatetime = _datetime_type
            mkbytes = _binary
        else:
            mkdatetime = _datetime
            mkbytes = _binary
        target = FastUnmarshaller(True, False, mkbytes, mkdatetime, Fault)
        parser = FastParser(target)
    else:
        target = Unmarshaller(use_datetime=use_datetime,
                              use_builtin_types=use_builtin_types)
        if FastParser:
            parser = FastParser(target)
        else:
            parser = ExpatParser(target)
    return parser, target

##
# Convert a Python tuple or a Fault instance to an XML-RPC packet.
+# +# @def dumps(params, **options) +# @param params A tuple or Fault instance. +# @keyparam methodname If given, create a methodCall request for +# this method name. +# @keyparam methodresponse If given, create a methodResponse packet. +# If used with a tuple, the tuple must be a singleton (that is, +# it must contain exactly one element). +# @keyparam encoding The packet encoding. +# @return A string containing marshalled data. + +def dumps(params, methodname=None, methodresponse=None, encoding=None, + allow_none=False): + """data [,options] -> marshalled data + + Convert an argument tuple or a Fault instance to an XML-RPC + request (or response, if the methodresponse option is used). + + In addition to the data object, the following options can be given + as keyword arguments: + + methodname: the method name for a methodCall packet + + methodresponse: true to create a methodResponse packet. + If this option is used with a tuple, the tuple must be + a singleton (i.e. it can contain only one element). + + encoding: the packet encoding (default is UTF-8) + + All byte strings in the data structure are assumed to use the + packet encoding. Unicode strings are automatically converted, + where necessary. 
+ """ + + assert isinstance(params, (tuple, Fault)), "argument must be tuple or Fault instance" + if isinstance(params, Fault): + methodresponse = 1 + elif methodresponse and isinstance(params, tuple): + assert len(params) == 1, "response tuple must be a singleton" + + if not encoding: + encoding = "utf-8" + + if FastMarshaller: + m = FastMarshaller(encoding) + else: + m = Marshaller(encoding, allow_none) + + data = m.dumps(params) + + if encoding != "utf-8": + xmlheader = "\n" % str(encoding) + else: + xmlheader = "\n" # utf-8 is default + + # standard XML-RPC wrappings + if methodname: + # a method call + if not isinstance(methodname, str): + methodname = methodname.encode(encoding) + data = ( + xmlheader, + "\n" + "", methodname, "\n", + data, + "\n" + ) + elif methodresponse: + # a method response, or a fault structure + data = ( + xmlheader, + "\n", + data, + "\n" + ) + else: + return data # return as is + return str("").join(data) + +## +# Convert an XML-RPC packet to a Python object. If the XML-RPC packet +# represents a fault condition, this function raises a Fault exception. +# +# @param data An XML-RPC packet, given as an 8-bit string. +# @return A tuple containing the unpacked data, and the method name +# (None if not present). +# @see Fault + +def loads(data, use_datetime=False, use_builtin_types=False): + """data -> unmarshalled data, method name + + Convert an XML-RPC packet to unmarshalled data plus a method + name (None if not present). + + If the XML-RPC packet represents a fault condition, this function + raises a Fault exception. 
+ """ + p, u = getparser(use_datetime=use_datetime, use_builtin_types=use_builtin_types) + p.feed(data) + p.close() + return u.close(), u.getmethodname() + +## +# Encode a string using the gzip content encoding such as specified by the +# Content-Encoding: gzip +# in the HTTP header, as described in RFC 1952 +# +# @param data the unencoded data +# @return the encoded data + +def gzip_encode(data): + """data -> gzip encoded data + + Encode data using the gzip content encoding as described in RFC 1952 + """ + if not gzip: + raise NotImplementedError + f = BytesIO() + gzf = gzip.GzipFile(mode="wb", fileobj=f, compresslevel=1) + gzf.write(data) + gzf.close() + encoded = f.getvalue() + f.close() + return encoded + +## +# Decode a string using the gzip content encoding such as specified by the +# Content-Encoding: gzip +# in the HTTP header, as described in RFC 1952 +# +# @param data The encoded data +# @return the unencoded data +# @raises ValueError if data is not correctly coded. + +def gzip_decode(data): + """gzip encoded data -> unencoded data + + Decode data using the gzip content encoding as described in RFC 1952 + """ + if not gzip: + raise NotImplementedError + f = BytesIO(data) + gzf = gzip.GzipFile(mode="rb", fileobj=f) + try: + decoded = gzf.read() + except IOError: + raise ValueError("invalid data") + f.close() + gzf.close() + return decoded + +## +# Return a decoded file-like object for the gzip encoding +# as described in RFC 1952. +# +# @param response A stream supporting a read() method +# @return a file-like object that the decoded data can be read() from + +class GzipDecodedResponse(gzip.GzipFile if gzip else object): + """a file-like object to decode a response encoded with the gzip + method, as described in RFC 1952. 
+ """ + def __init__(self, response): + #response doesn't support tell() and read(), required by + #GzipFile + if not gzip: + raise NotImplementedError + self.io = BytesIO(response.read()) + gzip.GzipFile.__init__(self, mode="rb", fileobj=self.io) + + def close(self): + gzip.GzipFile.close(self) + self.io.close() + + +# -------------------------------------------------------------------- +# request dispatcher + +class _Method(object): + # some magic to bind an XML-RPC method to an RPC server. + # supports "nested" methods (e.g. examples.getStateName) + def __init__(self, send, name): + self.__send = send + self.__name = name + def __getattr__(self, name): + return _Method(self.__send, "%s.%s" % (self.__name, name)) + def __call__(self, *args): + return self.__send(self.__name, args) + +## +# Standard transport class for XML-RPC over HTTP. +#

+# You can create custom transports by subclassing this method, and +# overriding selected methods. + +class Transport(object): + """Handles an HTTP transaction to an XML-RPC server.""" + + # client identifier (may be overridden) + user_agent = "Python-xmlrpc/%s" % __version__ + + #if true, we'll request gzip encoding + accept_gzip_encoding = True + + # if positive, encode request using gzip if it exceeds this threshold + # note that many server will get confused, so only use it if you know + # that they can decode such a request + encode_threshold = None #None = don't encode + + def __init__(self, use_datetime=False, use_builtin_types=False): + self._use_datetime = use_datetime + self._use_builtin_types = use_builtin_types + self._connection = (None, None) + self._extra_headers = [] + + ## + # Send a complete request, and parse the response. + # Retry request if a cached connection has disconnected. + # + # @param host Target host. + # @param handler Target PRC handler. + # @param request_body XML-RPC request body. + # @param verbose Debugging flag. + # @return Parsed response. + + def request(self, host, handler, request_body, verbose=False): + #retry request once if cached connection has gone cold + for i in (0, 1): + try: + return self.single_request(host, handler, request_body, verbose) + except socket.error as e: + if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE): + raise + except http_client.BadStatusLine: #close after we sent request + if i: + raise + + def single_request(self, host, handler, request_body, verbose=False): + # issue XML-RPC request + try: + http_conn = self.send_request(host, handler, request_body, verbose) + resp = http_conn.getresponse() + if resp.status == 200: + self.verbose = verbose + return self.parse_response(resp) + + except Fault: + raise + except Exception: + #All unexpected errors leave connection in + # a strange state, so we clear it. + self.close() + raise + + #We got an error response. 
+ #Discard any response data and raise exception + if resp.getheader("content-length", ""): + resp.read() + raise ProtocolError( + host + handler, + resp.status, resp.reason, + dict(resp.getheaders()) + ) + + + ## + # Create parser. + # + # @return A 2-tuple containing a parser and a unmarshaller. + + def getparser(self): + # get parser and unmarshaller + return getparser(use_datetime=self._use_datetime, + use_builtin_types=self._use_builtin_types) + + ## + # Get authorization info from host parameter + # Host may be a string, or a (host, x509-dict) tuple; if a string, + # it is checked for a "user:pw@host" format, and a "Basic + # Authentication" header is added if appropriate. + # + # @param host Host descriptor (URL or (URL, x509 info) tuple). + # @return A 3-tuple containing (actual host, extra headers, + # x509 info). The header and x509 fields may be None. + + def get_host_info(self, host): + + x509 = {} + if isinstance(host, tuple): + host, x509 = host + + auth, host = urllib_parse.splituser(host) + + if auth: + auth = urllib_parse.unquote_to_bytes(auth) + auth = base64.encodebytes(auth).decode("utf-8") + auth = "".join(auth.split()) # get rid of whitespace + extra_headers = [ + ("Authorization", "Basic " + auth) + ] + else: + extra_headers = [] + + return host, extra_headers, x509 + + ## + # Connect to server. + # + # @param host Target host. + # @return An HTTPConnection object + + def make_connection(self, host): + #return an existing connection if possible. This allows + #HTTP/1.1 keep-alive. + if self._connection and host == self._connection[0]: + return self._connection[1] + # create a HTTP connection object from a host descriptor + chost, self._extra_headers, x509 = self.get_host_info(host) + self._connection = host, http_client.HTTPConnection(chost) + return self._connection[1] + + ## + # Clear any cached connection object. + # Used in the event of socket errors. 
+ # + def close(self): + if self._connection[1]: + self._connection[1].close() + self._connection = (None, None) + + ## + # Send HTTP request. + # + # @param host Host descriptor (URL or (URL, x509 info) tuple). + # @param handler Targer RPC handler (a path relative to host) + # @param request_body The XML-RPC request body + # @param debug Enable debugging if debug is true. + # @return An HTTPConnection. + + def send_request(self, host, handler, request_body, debug): + connection = self.make_connection(host) + headers = self._extra_headers[:] + if debug: + connection.set_debuglevel(1) + if self.accept_gzip_encoding and gzip: + connection.putrequest("POST", handler, skip_accept_encoding=True) + headers.append(("Accept-Encoding", "gzip")) + else: + connection.putrequest("POST", handler) + headers.append(("Content-Type", "text/xml")) + headers.append(("User-Agent", self.user_agent)) + self.send_headers(connection, headers) + self.send_content(connection, request_body) + return connection + + ## + # Send request headers. + # This function provides a useful hook for subclassing + # + # @param connection httpConnection. + # @param headers list of key,value pairs for HTTP headers + + def send_headers(self, connection, headers): + for key, val in headers: + connection.putheader(key, val) + + ## + # Send request body. + # This function provides a useful hook for subclassing + # + # @param connection httpConnection. + # @param request_body XML-RPC request body. + + def send_content(self, connection, request_body): + #optionally encode the request + if (self.encode_threshold is not None and + self.encode_threshold < len(request_body) and + gzip): + connection.putheader("Content-Encoding", "gzip") + request_body = gzip_encode(request_body) + + connection.putheader("Content-Length", str(len(request_body))) + connection.endheaders(request_body) + + ## + # Parse response. + # + # @param file Stream. + # @return Response tuple and target method. 
+ + def parse_response(self, response): + # read response data from httpresponse, and parse it + # Check for new http response object, otherwise it is a file object. + if hasattr(response, 'getheader'): + if response.getheader("Content-Encoding", "") == "gzip": + stream = GzipDecodedResponse(response) + else: + stream = response + else: + stream = response + + p, u = self.getparser() + + while 1: + data = stream.read(1024) + if not data: + break + if self.verbose: + print("body:", repr(data)) + p.feed(data) + + if stream is not response: + stream.close() + p.close() + + return u.close() + +## +# Standard transport class for XML-RPC over HTTPS. + +class SafeTransport(Transport): + """Handles an HTTPS transaction to an XML-RPC server.""" + + # FIXME: mostly untested + + def make_connection(self, host): + if self._connection and host == self._connection[0]: + return self._connection[1] + + if not hasattr(http_client, "HTTPSConnection"): + raise NotImplementedError( + "your version of http.client doesn't support HTTPS") + # create a HTTPS connection object from a host descriptor + # host may be a string, or a (host, x509-dict) tuple + chost, self._extra_headers, x509 = self.get_host_info(host) + self._connection = host, http_client.HTTPSConnection(chost, + None, **(x509 or {})) + return self._connection[1] + +## +# Standard server proxy. This class establishes a virtual connection +# to an XML-RPC server. +#

+# This class is available as ServerProxy and Server. New code should +# use ServerProxy, to avoid confusion. +# +# @def ServerProxy(uri, **options) +# @param uri The connection point on the server. +# @keyparam transport A transport factory, compatible with the +# standard transport class. +# @keyparam encoding The default encoding used for 8-bit strings +# (default is UTF-8). +# @keyparam verbose Use a true value to enable debugging output. +# (printed to standard output). +# @see Transport + +class ServerProxy(object): + """uri [,options] -> a logical connection to an XML-RPC server + + uri is the connection point on the server, given as + scheme://host/target. + + The standard implementation always supports the "http" scheme. If + SSL socket support is available (Python 2.0), it also supports + "https". + + If the target part and the slash preceding it are both omitted, + "/RPC2" is assumed. + + The following options can be given as keyword arguments: + + transport: a transport factory + encoding: the request encoding (default is UTF-8) + + All 8-bit strings passed to the server proxy are assumed to use + the given encoding. 
+ """ + + def __init__(self, uri, transport=None, encoding=None, verbose=False, + allow_none=False, use_datetime=False, use_builtin_types=False): + # establish a "logical" server connection + + # get the url + type, uri = urllib_parse.splittype(uri) + if type not in ("http", "https"): + raise IOError("unsupported XML-RPC protocol") + self.__host, self.__handler = urllib_parse.splithost(uri) + if not self.__handler: + self.__handler = "/RPC2" + + if transport is None: + if type == "https": + handler = SafeTransport + else: + handler = Transport + transport = handler(use_datetime=use_datetime, + use_builtin_types=use_builtin_types) + self.__transport = transport + + self.__encoding = encoding or 'utf-8' + self.__verbose = verbose + self.__allow_none = allow_none + + def __close(self): + self.__transport.close() + + def __request(self, methodname, params): + # call a method on the remote server + + request = dumps(params, methodname, encoding=self.__encoding, + allow_none=self.__allow_none).encode(self.__encoding) + + response = self.__transport.request( + self.__host, + self.__handler, + request, + verbose=self.__verbose + ) + + if len(response) == 1: + response = response[0] + + return response + + def __repr__(self): + return ( + "" % + (self.__host, self.__handler) + ) + + __str__ = __repr__ + + def __getattr__(self, name): + # magic method dispatcher + return _Method(self.__request, name) + + # note: to call a remote object with an non-standard name, use + # result getattr(server, "strange-python-name")(args) + + def __call__(self, attr): + """A workaround to get special attributes on the ServerProxy + without interfering with the magic __getattr__ + """ + if attr == "close": + return self.__close + elif attr == "transport": + return self.__transport + raise AttributeError("Attribute %r not found" % (attr,)) + +# compatibility + +Server = ServerProxy + +# -------------------------------------------------------------------- +# test code + +if __name__ == 
"__main__": + + # simple test program (from the XML-RPC specification) + + # local server, available from Lib/xmlrpc/server.py + server = ServerProxy("http://localhost:8000") + + try: + print(server.currentTime.getCurrentTime()) + except Error as v: + print("ERROR", v) + + multi = MultiCall(server) + multi.getData() + multi.pow(2,9) + multi.add(1,2) + try: + for response in multi(): + print(response) + except Error as v: + print("ERROR", v) diff --git a/.install/.kodi/addons/script.module.future/libs/future/backports/xmlrpc/server.py b/.install/.kodi/addons/script.module.future/libs/future/backports/xmlrpc/server.py new file mode 100644 index 000000000..28072bfec --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/backports/xmlrpc/server.py @@ -0,0 +1,999 @@ +r""" +Ported using Python-Future from the Python 3.3 standard library. + +XML-RPC Servers. + +This module can be used to create simple XML-RPC servers +by creating a server and either installing functions, a +class instance, or by extending the SimpleXMLRPCServer +class. + +It can also be used to handle XML-RPC requests in a CGI +environment using CGIXMLRPCRequestHandler. + +The Doc* classes can be used to create XML-RPC servers that +serve pydoc-style documentation in response to HTTP +GET requests. This documentation is dynamically generated +based on the functions and methods registered with the +server. + +A list of possible usage patterns follows: + +1. Install functions: + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_function(pow) +server.register_function(lambda x,y: x+y, 'add') +server.serve_forever() + +2. Install an instance: + +class MyFuncs: + def __init__(self): + # make all of the sys functions available through sys.func_name + import sys + self.sys = sys + def _listMethods(self): + # implement this method so that system.listMethods + # knows to advertise the sys methods + return list_public_methods(self) + \ + ['sys.' 
+ method for method in list_public_methods(self.sys)] + def pow(self, x, y): return pow(x, y) + def add(self, x, y) : return x + y + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_introspection_functions() +server.register_instance(MyFuncs()) +server.serve_forever() + +3. Install an instance with custom dispatch method: + +class Math: + def _listMethods(self): + # this method must be present for system.listMethods + # to work + return ['add', 'pow'] + def _methodHelp(self, method): + # this method must be present for system.methodHelp + # to work + if method == 'add': + return "add(2,3) => 5" + elif method == 'pow': + return "pow(x, y[, z]) => number" + else: + # By convention, return empty + # string if no help is available + return "" + def _dispatch(self, method, params): + if method == 'pow': + return pow(*params) + elif method == 'add': + return params[0] + params[1] + else: + raise ValueError('bad method') + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_introspection_functions() +server.register_instance(Math()) +server.serve_forever() + +4. Subclass SimpleXMLRPCServer: + +class MathServer(SimpleXMLRPCServer): + def _dispatch(self, method, params): + try: + # We are forcing the 'export_' prefix on methods that are + # callable through XML-RPC to prevent potential security + # problems + func = getattr(self, 'export_' + method) + except AttributeError: + raise Exception('method "%s" is not supported' % method) + else: + return func(*params) + + def export_add(self, x, y): + return x + y + +server = MathServer(("localhost", 8000)) +server.serve_forever() + +5. CGI script: + +server = CGIXMLRPCRequestHandler() +server.register_function(pow) +server.handle_request() +""" + +from __future__ import absolute_import, division, print_function, unicode_literals +from future.builtins import int, str + +# Written by Brian Quinlan (brian@sweetapp.com). +# Based on code written by Fredrik Lundh. 
+ +from future.backports.xmlrpc.client import Fault, dumps, loads, gzip_encode, gzip_decode +from future.backports.http.server import BaseHTTPRequestHandler +import future.backports.http.server as http_server +from future.backports import socketserver +import sys +import os +import re +import pydoc +import inspect +import traceback +try: + import fcntl +except ImportError: + fcntl = None + +def resolve_dotted_attribute(obj, attr, allow_dotted_names=True): + """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d + + Resolves a dotted attribute name to an object. Raises + an AttributeError if any attribute in the chain starts with a '_'. + + If the optional allow_dotted_names argument is false, dots are not + supported and this function operates similar to getattr(obj, attr). + """ + + if allow_dotted_names: + attrs = attr.split('.') + else: + attrs = [attr] + + for i in attrs: + if i.startswith('_'): + raise AttributeError( + 'attempt to access private attribute "%s"' % i + ) + else: + obj = getattr(obj,i) + return obj + +def list_public_methods(obj): + """Returns a list of attribute strings, found in the specified + object, which represent callable attributes""" + + return [member for member in dir(obj) + if not member.startswith('_') and + callable(getattr(obj, member))] + +class SimpleXMLRPCDispatcher(object): + """Mix-in class that dispatches XML-RPC requests. + + This class is used to register XML-RPC method handlers + and then to dispatch them. This class doesn't need to be + instanced directly when used by SimpleXMLRPCServer but it + can be instanced when used by the MultiPathXMLRPCServer + """ + + def __init__(self, allow_none=False, encoding=None, + use_builtin_types=False): + self.funcs = {} + self.instance = None + self.allow_none = allow_none + self.encoding = encoding or 'utf-8' + self.use_builtin_types = use_builtin_types + + def register_instance(self, instance, allow_dotted_names=False): + """Registers an instance to respond to XML-RPC requests. 
+ + Only one instance can be installed at a time. + + If the registered instance has a _dispatch method then that + method will be called with the name of the XML-RPC method and + its parameters as a tuple + e.g. instance._dispatch('add',(2,3)) + + If the registered instance does not have a _dispatch method + then the instance will be searched to find a matching method + and, if found, will be called. Methods beginning with an '_' + are considered private and will not be called by + SimpleXMLRPCServer. + + If a registered function matches a XML-RPC request, then it + will be called instead of the registered instance. + + If the optional allow_dotted_names argument is true and the + instance does not have a _dispatch method, method names + containing dots are supported and resolved, as long as none of + the name segments start with an '_'. + + *** SECURITY WARNING: *** + + Enabling the allow_dotted_names options allows intruders + to access your module's global variables and may allow + intruders to execute arbitrary code on your machine. Only + use this option on a secure, closed network. + + """ + + self.instance = instance + self.allow_dotted_names = allow_dotted_names + + def register_function(self, function, name=None): + """Registers a function to respond to XML-RPC requests. + + The optional name argument can be used to set a Unicode name + for the function. + """ + + if name is None: + name = function.__name__ + self.funcs[name] = function + + def register_introspection_functions(self): + """Registers the XML-RPC introspection methods in the system + namespace. + + see http://xmlrpc.usefulinc.com/doc/reserved.html + """ + + self.funcs.update({'system.listMethods' : self.system_listMethods, + 'system.methodSignature' : self.system_methodSignature, + 'system.methodHelp' : self.system_methodHelp}) + + def register_multicall_functions(self): + """Registers the XML-RPC multicall method in the system + namespace. 
+ + see http://www.xmlrpc.com/discuss/msgReader$1208""" + + self.funcs.update({'system.multicall' : self.system_multicall}) + + def _marshaled_dispatch(self, data, dispatch_method = None, path = None): + """Dispatches an XML-RPC method from marshalled (XML) data. + + XML-RPC methods are dispatched from the marshalled (XML) data + using the _dispatch method and the result is returned as + marshalled data. For backwards compatibility, a dispatch + function can be provided as an argument (see comment in + SimpleXMLRPCRequestHandler.do_POST) but overriding the + existing method through subclassing is the preferred means + of changing method dispatch behavior. + """ + + try: + params, method = loads(data, use_builtin_types=self.use_builtin_types) + + # generate response + if dispatch_method is not None: + response = dispatch_method(method, params) + else: + response = self._dispatch(method, params) + # wrap response in a singleton tuple + response = (response,) + response = dumps(response, methodresponse=1, + allow_none=self.allow_none, encoding=self.encoding) + except Fault as fault: + response = dumps(fault, allow_none=self.allow_none, + encoding=self.encoding) + except: + # report exception back to server + exc_type, exc_value, exc_tb = sys.exc_info() + response = dumps( + Fault(1, "%s:%s" % (exc_type, exc_value)), + encoding=self.encoding, allow_none=self.allow_none, + ) + + return response.encode(self.encoding) + + def system_listMethods(self): + """system.listMethods() => ['add', 'subtract', 'multiple'] + + Returns a list of the methods supported by the server.""" + + methods = set(self.funcs.keys()) + if self.instance is not None: + # Instance can implement _listMethod to return a list of + # methods + if hasattr(self.instance, '_listMethods'): + methods |= set(self.instance._listMethods()) + # if the instance has a _dispatch method then we + # don't have enough information to provide a list + # of methods + elif not hasattr(self.instance, '_dispatch'): + methods 
|= set(list_public_methods(self.instance)) + return sorted(methods) + + def system_methodSignature(self, method_name): + """system.methodSignature('add') => [double, int, int] + + Returns a list describing the signature of the method. In the + above example, the add method takes two integers as arguments + and returns a double result. + + This server does NOT support system.methodSignature.""" + + # See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html + + return 'signatures not supported' + + def system_methodHelp(self, method_name): + """system.methodHelp('add') => "Adds two integers together" + + Returns a string containing documentation for the specified method.""" + + method = None + if method_name in self.funcs: + method = self.funcs[method_name] + elif self.instance is not None: + # Instance can implement _methodHelp to return help for a method + if hasattr(self.instance, '_methodHelp'): + return self.instance._methodHelp(method_name) + # if the instance has a _dispatch method then we + # don't have enough information to provide help + elif not hasattr(self.instance, '_dispatch'): + try: + method = resolve_dotted_attribute( + self.instance, + method_name, + self.allow_dotted_names + ) + except AttributeError: + pass + + # Note that we aren't checking that the method actually + # be a callable object of some kind + if method is None: + return "" + else: + return pydoc.getdoc(method) + + def system_multicall(self, call_list): + """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \ +[[4], ...] + + Allows the caller to package multiple XML-RPC calls into a single + request. + + See http://www.xmlrpc.com/discuss/msgReader$1208 + """ + + results = [] + for call in call_list: + method_name = call['methodName'] + params = call['params'] + + try: + # XXX A marshalling error in any response will fail the entire + # multicall. If someone cares they should fix this. 
+ results.append([self._dispatch(method_name, params)]) + except Fault as fault: + results.append( + {'faultCode' : fault.faultCode, + 'faultString' : fault.faultString} + ) + except: + exc_type, exc_value, exc_tb = sys.exc_info() + results.append( + {'faultCode' : 1, + 'faultString' : "%s:%s" % (exc_type, exc_value)} + ) + return results + + def _dispatch(self, method, params): + """Dispatches the XML-RPC method. + + XML-RPC calls are forwarded to a registered function that + matches the called XML-RPC method name. If no such function + exists then the call is forwarded to the registered instance, + if available. + + If the registered instance has a _dispatch method then that + method will be called with the name of the XML-RPC method and + its parameters as a tuple + e.g. instance._dispatch('add',(2,3)) + + If the registered instance does not have a _dispatch method + then the instance will be searched to find a matching method + and, if found, will be called. + + Methods beginning with an '_' are considered private and will + not be called. + """ + + func = None + try: + # check to see if a matching function has been registered + func = self.funcs[method] + except KeyError: + if self.instance is not None: + # check for a _dispatch method + if hasattr(self.instance, '_dispatch'): + return self.instance._dispatch(method, params) + else: + # call instance method directly + try: + func = resolve_dotted_attribute( + self.instance, + method, + self.allow_dotted_names + ) + except AttributeError: + pass + + if func is not None: + return func(*params) + else: + raise Exception('method "%s" is not supported' % method) + +class SimpleXMLRPCRequestHandler(BaseHTTPRequestHandler): + """Simple XML-RPC request handler class. + + Handles all HTTP POST requests and attempts to decode them as + XML-RPC requests. + """ + + # Class attribute listing the accessible path components; + # paths not on this list will result in a 404 error. 
+ rpc_paths = ('/', '/RPC2') + + #if not None, encode responses larger than this, if possible + encode_threshold = 1400 #a common MTU + + #Override form StreamRequestHandler: full buffering of output + #and no Nagle. + wbufsize = -1 + disable_nagle_algorithm = True + + # a re to match a gzip Accept-Encoding + aepattern = re.compile(r""" + \s* ([^\s;]+) \s* #content-coding + (;\s* q \s*=\s* ([0-9\.]+))? #q + """, re.VERBOSE | re.IGNORECASE) + + def accept_encodings(self): + r = {} + ae = self.headers.get("Accept-Encoding", "") + for e in ae.split(","): + match = self.aepattern.match(e) + if match: + v = match.group(3) + v = float(v) if v else 1.0 + r[match.group(1)] = v + return r + + def is_rpc_path_valid(self): + if self.rpc_paths: + return self.path in self.rpc_paths + else: + # If .rpc_paths is empty, just assume all paths are legal + return True + + def do_POST(self): + """Handles the HTTP POST request. + + Attempts to interpret all HTTP POST requests as XML-RPC calls, + which are forwarded to the server's _dispatch method for handling. + """ + + # Check that the path is legal + if not self.is_rpc_path_valid(): + self.report_404() + return + + try: + # Get arguments by reading body of request. + # We read this in chunks to avoid straining + # socket.read(); around the 10 or 15Mb mark, some platforms + # begin to have problems (bug #792570). + max_chunk_size = 10*1024*1024 + size_remaining = int(self.headers["content-length"]) + L = [] + while size_remaining: + chunk_size = min(size_remaining, max_chunk_size) + chunk = self.rfile.read(chunk_size) + if not chunk: + break + L.append(chunk) + size_remaining -= len(L[-1]) + data = b''.join(L) + + data = self.decode_request_content(data) + if data is None: + return #response has been sent + + # In previous versions of SimpleXMLRPCServer, _dispatch + # could be overridden in this class, instead of in + # SimpleXMLRPCDispatcher. 
To maintain backwards compatibility, + # check to see if a subclass implements _dispatch and dispatch + # using that method if present. + response = self.server._marshaled_dispatch( + data, getattr(self, '_dispatch', None), self.path + ) + except Exception as e: # This should only happen if the module is buggy + # internal error, report as HTTP server error + self.send_response(500) + + # Send information about the exception if requested + if hasattr(self.server, '_send_traceback_header') and \ + self.server._send_traceback_header: + self.send_header("X-exception", str(e)) + trace = traceback.format_exc() + trace = str(trace.encode('ASCII', 'backslashreplace'), 'ASCII') + self.send_header("X-traceback", trace) + + self.send_header("Content-length", "0") + self.end_headers() + else: + self.send_response(200) + self.send_header("Content-type", "text/xml") + if self.encode_threshold is not None: + if len(response) > self.encode_threshold: + q = self.accept_encodings().get("gzip", 0) + if q: + try: + response = gzip_encode(response) + self.send_header("Content-Encoding", "gzip") + except NotImplementedError: + pass + self.send_header("Content-length", str(len(response))) + self.end_headers() + self.wfile.write(response) + + def decode_request_content(self, data): + #support gzip encoding of request + encoding = self.headers.get("content-encoding", "identity").lower() + if encoding == "identity": + return data + if encoding == "gzip": + try: + return gzip_decode(data) + except NotImplementedError: + self.send_response(501, "encoding %r not supported" % encoding) + except ValueError: + self.send_response(400, "error decoding gzip content") + else: + self.send_response(501, "encoding %r not supported" % encoding) + self.send_header("Content-length", "0") + self.end_headers() + + def report_404 (self): + # Report a 404 error + self.send_response(404) + response = b'No such page' + self.send_header("Content-type", "text/plain") + self.send_header("Content-length", 
str(len(response))) + self.end_headers() + self.wfile.write(response) + + def log_request(self, code='-', size='-'): + """Selectively log an accepted request.""" + + if self.server.logRequests: + BaseHTTPRequestHandler.log_request(self, code, size) + +class SimpleXMLRPCServer(socketserver.TCPServer, + SimpleXMLRPCDispatcher): + """Simple XML-RPC server. + + Simple XML-RPC server that allows functions and a single instance + to be installed to handle requests. The default implementation + attempts to dispatch XML-RPC calls to the functions or instance + installed in the server. Override the _dispatch method inherited + from SimpleXMLRPCDispatcher to change this behavior. + """ + + allow_reuse_address = True + + # Warning: this is for debugging purposes only! Never set this to True in + # production code, as will be sending out sensitive information (exception + # and stack trace details) when exceptions are raised inside + # SimpleXMLRPCRequestHandler.do_POST + _send_traceback_header = False + + def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler, + logRequests=True, allow_none=False, encoding=None, + bind_and_activate=True, use_builtin_types=False): + self.logRequests = logRequests + + SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding, use_builtin_types) + socketserver.TCPServer.__init__(self, addr, requestHandler, bind_and_activate) + + # [Bug #1222790] If possible, set close-on-exec flag; if a + # method spawns a subprocess, the subprocess shouldn't have + # the listening socket open. + if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'): + flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD) + flags |= fcntl.FD_CLOEXEC + fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags) + +class MultiPathXMLRPCServer(SimpleXMLRPCServer): + """Multipath XML-RPC Server + This specialization of SimpleXMLRPCServer allows the user to create + multiple Dispatcher instances and assign them to different + HTTP request paths. 
This makes it possible to run two or more + 'virtual XML-RPC servers' at the same port. + Make sure that the requestHandler accepts the paths in question. + """ + def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler, + logRequests=True, allow_none=False, encoding=None, + bind_and_activate=True, use_builtin_types=False): + + SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests, allow_none, + encoding, bind_and_activate, use_builtin_types) + self.dispatchers = {} + self.allow_none = allow_none + self.encoding = encoding or 'utf-8' + + def add_dispatcher(self, path, dispatcher): + self.dispatchers[path] = dispatcher + return dispatcher + + def get_dispatcher(self, path): + return self.dispatchers[path] + + def _marshaled_dispatch(self, data, dispatch_method = None, path = None): + try: + response = self.dispatchers[path]._marshaled_dispatch( + data, dispatch_method, path) + except: + # report low level exception back to server + # (each dispatcher should have handled their own + # exceptions) + exc_type, exc_value = sys.exc_info()[:2] + response = dumps( + Fault(1, "%s:%s" % (exc_type, exc_value)), + encoding=self.encoding, allow_none=self.allow_none) + response = response.encode(self.encoding) + return response + +class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): + """Simple handler for XML-RPC data passed through CGI.""" + + def __init__(self, allow_none=False, encoding=None, use_builtin_types=False): + SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding, use_builtin_types) + + def handle_xmlrpc(self, request_text): + """Handle a single XML-RPC request""" + + response = self._marshaled_dispatch(request_text) + + print('Content-Type: text/xml') + print('Content-Length: %d' % len(response)) + print() + sys.stdout.flush() + sys.stdout.buffer.write(response) + sys.stdout.buffer.flush() + + def handle_get(self): + """Handle a single HTTP GET request. 
+ + Default implementation indicates an error because + XML-RPC uses the POST method. + """ + + code = 400 + message, explain = BaseHTTPRequestHandler.responses[code] + + response = http_server.DEFAULT_ERROR_MESSAGE % \ + { + 'code' : code, + 'message' : message, + 'explain' : explain + } + response = response.encode('utf-8') + print('Status: %d %s' % (code, message)) + print('Content-Type: %s' % http_server.DEFAULT_ERROR_CONTENT_TYPE) + print('Content-Length: %d' % len(response)) + print() + sys.stdout.flush() + sys.stdout.buffer.write(response) + sys.stdout.buffer.flush() + + def handle_request(self, request_text=None): + """Handle a single XML-RPC request passed through a CGI post method. + + If no XML data is given then it is read from stdin. The resulting + XML-RPC response is printed to stdout along with the correct HTTP + headers. + """ + + if request_text is None and \ + os.environ.get('REQUEST_METHOD', None) == 'GET': + self.handle_get() + else: + # POST data is normally available through stdin + try: + length = int(os.environ.get('CONTENT_LENGTH', None)) + except (ValueError, TypeError): + length = -1 + if request_text is None: + request_text = sys.stdin.read(length) + + self.handle_xmlrpc(request_text) + + +# ----------------------------------------------------------------------------- +# Self documenting XML-RPC Server. + +class ServerHTMLDoc(pydoc.HTMLDoc): + """Class used to generate pydoc HTML document for a server""" + + def markup(self, text, escape=None, funcs={}, classes={}, methods={}): + """Mark up some plain text, given a context of symbols to look for. + Each context dictionary maps object names to anchor names.""" + escape = escape or self.escape + results = [] + here = 0 + + # XXX Note that this regular expression does not allow for the + # hyperlinking of arbitrary strings being used as method + # names. Only methods with names consisting of word characters + # and '.'s are hyperlinked. 
+ pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|' + r'RFC[- ]?(\d+)|' + r'PEP[- ]?(\d+)|' + r'(self\.)?((?:\w|\.)+))\b') + while 1: + match = pattern.search(text, here) + if not match: break + start, end = match.span() + results.append(escape(text[here:start])) + + all, scheme, rfc, pep, selfdot, name = match.groups() + if scheme: + url = escape(all).replace('"', '"') + results.append('%s' % (url, url)) + elif rfc: + url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc) + results.append('%s' % (url, escape(all))) + elif pep: + url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep) + results.append('%s' % (url, escape(all))) + elif text[end:end+1] == '(': + results.append(self.namelink(name, methods, funcs, classes)) + elif selfdot: + results.append('self.%s' % name) + else: + results.append(self.namelink(name, classes)) + here = end + results.append(escape(text[here:])) + return ''.join(results) + + def docroutine(self, object, name, mod=None, + funcs={}, classes={}, methods={}, cl=None): + """Produce HTML documentation for a function or method object.""" + + anchor = (cl and cl.__name__ or '') + '-' + name + note = '' + + title = '%s' % ( + self.escape(anchor), self.escape(name)) + + if inspect.ismethod(object): + args = inspect.getfullargspec(object) + # exclude the argument bound to the instance, it will be + # confusing to the non-Python user + argspec = inspect.formatargspec ( + args.args[1:], + args.varargs, + args.varkw, + args.defaults, + annotations=args.annotations, + formatvalue=self.formatvalue + ) + elif inspect.isfunction(object): + args = inspect.getfullargspec(object) + argspec = inspect.formatargspec( + args.args, args.varargs, args.varkw, args.defaults, + annotations=args.annotations, + formatvalue=self.formatvalue) + else: + argspec = '(...)' + + if isinstance(object, tuple): + argspec = object[0] or argspec + docstring = object[1] or "" + else: + docstring = pydoc.getdoc(object) + + decl = title + argspec + (note and self.grey( + 
'%s' % note)) + + doc = self.markup( + docstring, self.preformat, funcs, classes, methods) + doc = doc and '

%s
' % doc + return '
%s
%s
\n' % (decl, doc) + + def docserver(self, server_name, package_documentation, methods): + """Produce HTML documentation for an XML-RPC server.""" + + fdict = {} + for key, value in methods.items(): + fdict[key] = '#-' + key + fdict[value] = fdict[key] + + server_name = self.escape(server_name) + head = '%s' % server_name + result = self.heading(head, '#ffffff', '#7799ee') + + doc = self.markup(package_documentation, self.preformat, fdict) + doc = doc and '%s' % doc + result = result + '

%s

\n' % doc + + contents = [] + method_items = sorted(methods.items()) + for key, value in method_items: + contents.append(self.docroutine(value, key, funcs=fdict)) + result = result + self.bigsection( + 'Methods', '#ffffff', '#eeaa77', ''.join(contents)) + + return result + +class XMLRPCDocGenerator(object): + """Generates documentation for an XML-RPC server. + + This class is designed as mix-in and should not + be constructed directly. + """ + + def __init__(self): + # setup variables used for HTML documentation + self.server_name = 'XML-RPC Server Documentation' + self.server_documentation = \ + "This server exports the following methods through the XML-RPC "\ + "protocol." + self.server_title = 'XML-RPC Server Documentation' + + def set_server_title(self, server_title): + """Set the HTML title of the generated server documentation""" + + self.server_title = server_title + + def set_server_name(self, server_name): + """Set the name of the generated HTML server documentation""" + + self.server_name = server_name + + def set_server_documentation(self, server_documentation): + """Set the documentation string for the entire server.""" + + self.server_documentation = server_documentation + + def generate_html_documentation(self): + """generate_html_documentation() => html documentation for the server + + Generates HTML documentation for the server using introspection for + installed functions and instances that do not implement the + _dispatch method. 
Alternatively, instances can choose to implement + the _get_method_argstring(method_name) method to provide the + argument string used in the documentation and the + _methodHelp(method_name) method to provide the help text used + in the documentation.""" + + methods = {} + + for method_name in self.system_listMethods(): + if method_name in self.funcs: + method = self.funcs[method_name] + elif self.instance is not None: + method_info = [None, None] # argspec, documentation + if hasattr(self.instance, '_get_method_argstring'): + method_info[0] = self.instance._get_method_argstring(method_name) + if hasattr(self.instance, '_methodHelp'): + method_info[1] = self.instance._methodHelp(method_name) + + method_info = tuple(method_info) + if method_info != (None, None): + method = method_info + elif not hasattr(self.instance, '_dispatch'): + try: + method = resolve_dotted_attribute( + self.instance, + method_name + ) + except AttributeError: + method = method_info + else: + method = method_info + else: + assert 0, "Could not find method in self.functions and no "\ + "instance installed" + + methods[method_name] = method + + documenter = ServerHTMLDoc() + documentation = documenter.docserver( + self.server_name, + self.server_documentation, + methods + ) + + return documenter.page(self.server_title, documentation) + +class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): + """XML-RPC and documentation request handler class. + + Handles all HTTP POST requests and attempts to decode them as + XML-RPC requests. + + Handles all HTTP GET requests and interprets them as requests + for documentation. + """ + + def do_GET(self): + """Handles the HTTP GET request. + + Interpret all HTTP GET requests as requests for server + documentation. 
+ """ + # Check that the path is legal + if not self.is_rpc_path_valid(): + self.report_404() + return + + response = self.server.generate_html_documentation().encode('utf-8') + self.send_response(200) + self.send_header("Content-type", "text/html") + self.send_header("Content-length", str(len(response))) + self.end_headers() + self.wfile.write(response) + +class DocXMLRPCServer( SimpleXMLRPCServer, + XMLRPCDocGenerator): + """XML-RPC and HTML documentation server. + + Adds the ability to serve server documentation to the capabilities + of SimpleXMLRPCServer. + """ + + def __init__(self, addr, requestHandler=DocXMLRPCRequestHandler, + logRequests=True, allow_none=False, encoding=None, + bind_and_activate=True, use_builtin_types=False): + SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests, + allow_none, encoding, bind_and_activate, + use_builtin_types) + XMLRPCDocGenerator.__init__(self) + +class DocCGIXMLRPCRequestHandler( CGIXMLRPCRequestHandler, + XMLRPCDocGenerator): + """Handler for XML-RPC data and documentation requests passed through + CGI""" + + def handle_get(self): + """Handles the HTTP GET request. + + Interpret all HTTP GET requests as requests for server + documentation. 
+ """ + + response = self.generate_html_documentation().encode('utf-8') + + print('Content-Type: text/html') + print('Content-Length: %d' % len(response)) + print() + sys.stdout.flush() + sys.stdout.buffer.write(response) + sys.stdout.buffer.flush() + + def __init__(self): + CGIXMLRPCRequestHandler.__init__(self) + XMLRPCDocGenerator.__init__(self) + + +if __name__ == '__main__': + import datetime + + class ExampleService: + def getData(self): + return '42' + + class currentTime: + @staticmethod + def getCurrentTime(): + return datetime.datetime.now() + + server = SimpleXMLRPCServer(("localhost", 8000)) + server.register_function(pow) + server.register_function(lambda x,y: x+y, 'add') + server.register_instance(ExampleService(), allow_dotted_names=True) + server.register_multicall_functions() + print('Serving XML-RPC on localhost port 8000') + print('It is advisable to run this example server within a secure, closed network.') + try: + server.serve_forever() + except KeyboardInterrupt: + print("\nKeyboard interrupt received, exiting.") + server.server_close() + sys.exit(0) diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/__init__.py new file mode 100644 index 000000000..216465a15 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/__init__.py @@ -0,0 +1,51 @@ +""" +A module that brings in equivalents of the new and modified Python 3 +builtins into Py2. Has no effect on Py3. + +See the docs `here `_ +(``docs/what-else.rst``) for more information. + +""" + +from future.builtins.iterators import (filter, map, zip) +# The isinstance import is no longer needed. We provide it only for +# backward-compatibility with future v0.8.2. It will be removed in future v1.0. 
+from future.builtins.misc import (ascii, chr, hex, input, isinstance, next, + oct, open, pow, round, super) +from future.utils import PY3 + +if PY3: + import builtins + bytes = builtins.bytes + dict = builtins.dict + int = builtins.int + list = builtins.list + object = builtins.object + range = builtins.range + str = builtins.str + __all__ = [] +else: + from future.types import (newbytes as bytes, + newdict as dict, + newint as int, + newlist as list, + newobject as object, + newrange as range, + newstr as str) +from future import utils + + +if not utils.PY3: + # We only import names that shadow the builtins on Py2. No other namespace + # pollution on Py2. + + # Only shadow builtins on Py2; no new names + __all__ = ['filter', 'map', 'zip', + 'ascii', 'chr', 'hex', 'input', 'next', 'oct', 'open', 'pow', + 'round', 'super', + 'bytes', 'dict', 'int', 'list', 'object', 'range', 'str', + ] + +else: + # No namespace pollution on Py3 + __all__ = [] diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/disabled.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/disabled.py new file mode 100644 index 000000000..f6d6ea9b8 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/disabled.py @@ -0,0 +1,66 @@ +""" +This disables builtin functions (and one exception class) which are +removed from Python 3.3. + +This module is designed to be used like this:: + + from future.builtins.disabled import * + +This disables the following obsolete Py2 builtin functions:: + + apply, cmp, coerce, execfile, file, input, long, + raw_input, reduce, reload, unicode, xrange + +We don't hack __builtin__, which is very fragile because it contaminates +imported modules too. Instead, we just create new functions with +the same names as the obsolete builtins from Python 2 which raise +NameError exceptions when called. + +Note that both ``input()`` and ``raw_input()`` are among the disabled +functions (in this module). 
Although ``input()`` exists as a builtin in +Python 3, the Python 2 ``input()`` builtin is unsafe to use because it +can lead to shell injection. Therefore we shadow it by default upon ``from +future.builtins.disabled import *``, in case someone forgets to import our +replacement ``input()`` somehow and expects Python 3 semantics. + +See the ``future.builtins.misc`` module for a working version of +``input`` with Python 3 semantics. + +(Note that callable() is not among the functions disabled; this was +reintroduced into Python 3.2.) + +This exception class is also disabled: + + StandardError + +""" + +from __future__ import division, absolute_import, print_function + +from future import utils + + +OBSOLETE_BUILTINS = ['apply', 'chr', 'cmp', 'coerce', 'execfile', 'file', + 'input', 'long', 'raw_input', 'reduce', 'reload', + 'unicode', 'xrange', 'StandardError'] + + +def disabled_function(name): + ''' + Returns a function that cannot be called + ''' + def disabled(*args, **kwargs): + ''' + A function disabled by the ``future`` module. This function is + no longer a builtin in Python 3. 
+ ''' + raise NameError('obsolete Python 2 builtin {0} is disabled'.format(name)) + return disabled + + +if not utils.PY3: + for fname in OBSOLETE_BUILTINS: + locals()[fname] = disabled_function(fname) + __all__ = OBSOLETE_BUILTINS +else: + __all__ = [] diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/iterators.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/iterators.py new file mode 100644 index 000000000..dff651e0f --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/iterators.py @@ -0,0 +1,52 @@ +""" +This module is designed to be used as follows:: + + from future.builtins.iterators import * + +And then, for example:: + + for i in range(10**15): + pass + + for (a, b) in zip(range(10**15), range(-10**15, 0)): + pass + +Note that this is standard Python 3 code, plus some imports that do +nothing on Python 3. + +The iterators this brings in are:: + +- ``range`` +- ``filter`` +- ``map`` +- ``zip`` + +On Python 2, ``range`` is a pure-Python backport of Python 3's ``range`` +iterator with slicing support. The other iterators (``filter``, ``map``, +``zip``) are from the ``itertools`` module on Python 2. On Python 3 these +are available in the module namespace but not exported for * imports via +__all__ (zero no namespace pollution). + +Note that these are also available in the standard library +``future_builtins`` module on Python 2 -- but not Python 3, so using +the standard library version is not portable, nor anywhere near complete. 
+""" + +from __future__ import division, absolute_import, print_function + +import itertools +from future import utils + +if not utils.PY3: + filter = itertools.ifilter + map = itertools.imap + from future.types import newrange as range + zip = itertools.izip + __all__ = ['filter', 'map', 'range', 'zip'] +else: + import builtins + filter = builtins.filter + map = builtins.map + range = builtins.range + zip = builtins.zip + __all__ = [] diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/misc.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/misc.py new file mode 100644 index 000000000..90dc384ad --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/misc.py @@ -0,0 +1,124 @@ +""" +A module that brings in equivalents of various modified Python 3 builtins +into Py2. Has no effect on Py3. + +The builtin functions are: + +- ``ascii`` (from Py2's future_builtins module) +- ``hex`` (from Py2's future_builtins module) +- ``oct`` (from Py2's future_builtins module) +- ``chr`` (equivalent to ``unichr`` on Py2) +- ``input`` (equivalent to ``raw_input`` on Py2) +- ``next`` (calls ``__next__`` if it exists, else ``next`` method) +- ``open`` (equivalent to io.open on Py2) +- ``super`` (backport of Py3's magic zero-argument super() function +- ``round`` (new "Banker's Rounding" behaviour from Py3) + +``isinstance`` is also currently exported for backwards compatibility +with v0.8.2, although this has been deprecated since v0.9. + + +input() +------- +Like the new ``input()`` function from Python 3 (without eval()), except +that it returns bytes. Equivalent to Python 2's ``raw_input()``. + +Warning: By default, importing this module *removes* the old Python 2 +input() function entirely from ``__builtin__`` for safety. This is +because forgetting to import the new ``input`` from ``future`` might +otherwise lead to a security vulnerability (shell injection) on Python 2. 
+ +To restore it, you can retrieve it yourself from +``__builtin__._old_input``. + +Fortunately, ``input()`` seems to be seldom used in the wild in Python +2... + +""" + +from future import utils + + +if utils.PY2: + from io import open + from future_builtins import ascii, oct, hex + from __builtin__ import unichr as chr, pow as _builtin_pow + import __builtin__ + + # Only for backward compatibility with future v0.8.2: + isinstance = __builtin__.isinstance + + # Warning: Python 2's input() is unsafe and MUST not be able to be used + # accidentally by someone who expects Python 3 semantics but forgets + # to import it on Python 2. Versions of ``future`` prior to 0.11 + # deleted it from __builtin__. Now we keep in __builtin__ but shadow + # the name like all others. Just be sure to import ``input``. + + input = raw_input + + from future.builtins.newnext import newnext as next + from future.builtins.newround import newround as round + from future.builtins.newsuper import newsuper as super + from future.types.newint import newint + + _SENTINEL = object() + + def pow(x, y, z=_SENTINEL): + """ + pow(x, y[, z]) -> number + + With two arguments, equivalent to x**y. With three arguments, + equivalent to (x**y) % z, but may be more efficient (e.g. for ints). + """ + # Handle newints + if isinstance(x, newint): + x = long(x) + if isinstance(y, newint): + y = long(y) + if isinstance(z, newint): + z = long(z) + + try: + if z == _SENTINEL: + return _builtin_pow(x, y) + else: + return _builtin_pow(x, y, z) + except ValueError: + if z == _SENTINEL: + return _builtin_pow(x+0j, y) + else: + return _builtin_pow(x+0j, y, z) + + # ``future`` doesn't support Py3.0/3.1. 
If we ever did, we'd add this: + # callable = __builtin__.callable + + __all__ = ['ascii', 'chr', 'hex', 'input', 'isinstance', 'next', 'oct', + 'open', 'pow', 'round', 'super'] + +else: + import builtins + ascii = builtins.ascii + chr = builtins.chr + hex = builtins.hex + input = builtins.input + next = builtins.next + # Only for backward compatibility with future v0.8.2: + isinstance = builtins.isinstance + oct = builtins.oct + open = builtins.open + pow = builtins.pow + round = builtins.round + super = builtins.super + + __all__ = [] + + # The callable() function was removed from Py3.0 and 3.1 and + # reintroduced into Py3.2+. ``future`` doesn't support Py3.0/3.1. If we ever + # did, we'd add this: + # try: + # callable = builtins.callable + # except AttributeError: + # # Definition from Pandas + # def callable(obj): + # return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + # __all__.append('callable') diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/newnext.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/newnext.py new file mode 100644 index 000000000..097638ac1 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/newnext.py @@ -0,0 +1,70 @@ +''' +This module provides a newnext() function in Python 2 that mimics the +behaviour of ``next()`` in Python 3, falling back to Python 2's behaviour for +compatibility if this fails. + +``newnext(iterator)`` calls the iterator's ``__next__()`` method if it exists. If this +doesn't exist, it falls back to calling a ``next()`` method. + +For example: + + >>> class Odds(object): + ... def __init__(self, start=1): + ... self.value = start - 2 + ... def __next__(self): # note the Py3 interface + ... self.value += 2 + ... return self.value + ... def __iter__(self): + ... return self + ... 
+ >>> iterator = Odds() + >>> next(iterator) + 1 + >>> next(iterator) + 3 + +If you are defining your own custom iterator class as above, it is preferable +to explicitly decorate the class with the @implements_iterator decorator from +``future.utils`` as follows: + + >>> @implements_iterator + ... class Odds(object): + ... # etc + ... pass + +This next() function is primarily for consuming iterators defined in Python 3 +code elsewhere that we would like to run on Python 2 or 3. +''' + +_builtin_next = next + +_SENTINEL = object() + +def newnext(iterator, default=_SENTINEL): + """ + next(iterator[, default]) + + Return the next item from the iterator. If default is given and the iterator + is exhausted, it is returned instead of raising StopIteration. + """ + + # args = [] + # if default is not _SENTINEL: + # args.append(default) + try: + try: + return iterator.__next__() + except AttributeError: + try: + return iterator.next() + except AttributeError: + raise TypeError("'{0}' object is not an iterator".format( + iterator.__class__.__name__)) + except StopIteration as e: + if default is _SENTINEL: + raise e + else: + return default + + +__all__ = ['newnext'] diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/newround.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/newround.py new file mode 100644 index 000000000..3943ebb6e --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/newround.py @@ -0,0 +1,99 @@ +""" +``python-future``: pure Python implementation of Python 3 round(). +""" + +from future.utils import PYPY, PY26, bind_method + +# Use the decimal module for simplicity of implementation (and +# hopefully correctness). +from decimal import Decimal, ROUND_HALF_EVEN + + +def newround(number, ndigits=None): + """ + See Python 3 documentation: uses Banker's Rounding. + + Delegates to the __round__ method if for some reason this exists. 
+ + If not, rounds a number to a given precision in decimal digits (default + 0 digits). This returns an int when called with one argument, + otherwise the same type as the number. ndigits may be negative. + + See the test_round method in future/tests/test_builtins.py for + examples. + """ + return_int = False + if ndigits is None: + return_int = True + ndigits = 0 + if hasattr(number, '__round__'): + return number.__round__(ndigits) + + if ndigits < 0: + raise NotImplementedError('negative ndigits not supported yet') + exponent = Decimal('10') ** (-ndigits) + + if PYPY: + # Work around issue #24: round() breaks on PyPy with NumPy's types + if 'numpy' in repr(type(number)): + number = float(number) + + if not PY26: + d = Decimal.from_float(number).quantize(exponent, + rounding=ROUND_HALF_EVEN) + else: + d = from_float_26(number).quantize(exponent, rounding=ROUND_HALF_EVEN) + + if return_int: + return int(d) + else: + return float(d) + + +### From Python 2.7's decimal.py. Only needed to support Py2.6: + +def from_float_26(f): + """Converts a float to a decimal number, exactly. + + Note that Decimal.from_float(0.1) is not the same as Decimal('0.1'). + Since 0.1 is not exactly representable in binary floating point, the + value is stored as the nearest representable value which is + 0x1.999999999999ap-4. The exact equivalent of the value in decimal + is 0.1000000000000000055511151231257827021181583404541015625. 
+ + >>> Decimal.from_float(0.1) + Decimal('0.1000000000000000055511151231257827021181583404541015625') + >>> Decimal.from_float(float('nan')) + Decimal('NaN') + >>> Decimal.from_float(float('inf')) + Decimal('Infinity') + >>> Decimal.from_float(-float('inf')) + Decimal('-Infinity') + >>> Decimal.from_float(-0.0) + Decimal('-0') + + """ + import math as _math + from decimal import _dec_from_triple # only available on Py2.6 and Py2.7 (not 3.3) + + if isinstance(f, (int, long)): # handle integer inputs + return Decimal(f) + if _math.isinf(f) or _math.isnan(f): # raises TypeError if not a float + return Decimal(repr(f)) + if _math.copysign(1.0, f) == 1.0: + sign = 0 + else: + sign = 1 + n, d = abs(f).as_integer_ratio() + # int.bit_length() method doesn't exist on Py2.6: + def bit_length(d): + if d != 0: + return len(bin(abs(d))) - 2 + else: + return 0 + k = bit_length(d) - 1 + result = _dec_from_triple(sign, str(n*5**k), -k) + return result + + +__all__ = ['newround'] diff --git a/.install/.kodi/addons/script.module.future/libs/future/builtins/newsuper.py b/.install/.kodi/addons/script.module.future/libs/future/builtins/newsuper.py new file mode 100644 index 000000000..5d3402bd2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/builtins/newsuper.py @@ -0,0 +1,114 @@ +''' +This module provides a newsuper() function in Python 2 that mimics the +behaviour of super() in Python 3. It is designed to be used as follows: + + from __future__ import division, absolute_import, print_function + from future.builtins import super + +And then, for example: + + class VerboseList(list): + def append(self, item): + print('Adding an item') + super().append(item) # new simpler super() function + +Importing this module on Python 3 has no effect. + +This is based on (i.e. 
almost identical to) Ryan Kelly's magicsuper +module here: + + https://github.com/rfk/magicsuper.git + +Excerpts from Ryan's docstring: + + "Of course, you can still explicitly pass in the arguments if you want + to do something strange. Sometimes you really do want that, e.g. to + skip over some classes in the method resolution order. + + "How does it work? By inspecting the calling frame to determine the + function object being executed and the object on which it's being + called, and then walking the object's __mro__ chain to find out where + that function was defined. Yuck, but it seems to work..." +''' + +from __future__ import absolute_import +import sys +from types import FunctionType + +from future.utils import PY3, PY26 + + +_builtin_super = super + +_SENTINEL = object() + +def newsuper(typ=_SENTINEL, type_or_obj=_SENTINEL, framedepth=1): + '''Like builtin super(), but capable of magic. + + This acts just like the builtin super() function, but if called + without any arguments it attempts to infer them at runtime. + ''' + # Infer the correct call if used without arguments. + if typ is _SENTINEL: + # We'll need to do some frame hacking. + f = sys._getframe(framedepth) + + try: + # Get the function's first positional argument. + type_or_obj = f.f_locals[f.f_code.co_varnames[0]] + except (IndexError, KeyError,): + raise RuntimeError('super() used in a function with no args') + + try: + # Get the MRO so we can crawl it. + mro = type_or_obj.__mro__ + except (AttributeError, RuntimeError): # see issue #160 + try: + mro = type_or_obj.__class__.__mro__ + except AttributeError: + raise RuntimeError('super() used with a non-newstyle class') + + # A ``for...else`` block? Yes! It's odd, but useful. + # If unfamiliar with for...else, see: + # + # http://psung.blogspot.com/2007/12/for-else-in-python.html + for typ in mro: + # Find the class that owns the currently-executing method. 
+ for meth in typ.__dict__.values(): + # Drill down through any wrappers to the underlying func. + # This handles e.g. classmethod() and staticmethod(). + try: + while not isinstance(meth,FunctionType): + if isinstance(meth, property): + # Calling __get__ on the property will invoke + # user code which might throw exceptions or have + # side effects + meth = meth.fget + else: + try: + meth = meth.__func__ + except AttributeError: + meth = meth.__get__(type_or_obj, typ) + except (AttributeError, TypeError): + continue + if meth.func_code is f.f_code: + break # Aha! Found you. + else: + continue # Not found! Move onto the next class in MRO. + break # Found! Break out of the search loop. + else: + raise RuntimeError('super() called outside a method') + + # Dispatch to builtin super(). + if type_or_obj is not _SENTINEL: + return _builtin_super(typ, type_or_obj) + return _builtin_super(typ) + + +def superm(*args, **kwds): + f = sys._getframe(1) + nm = f.f_code.co_name + return getattr(newsuper(framedepth=2),nm)(*args, **kwds) + + +__all__ = ['newsuper'] diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/__init__.py new file mode 100644 index 000000000..040fdcf01 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/__init__.py @@ -0,0 +1,8 @@ +# future.moves package +from __future__ import absolute_import +import sys +__future_module__ = True +from future.standard_library import import_top_level_modules + +if sys.version_info[0] == 3: + import_top_level_modules() diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/_dummy_thread.py b/.install/.kodi/addons/script.module.future/libs/future/moves/_dummy_thread.py new file mode 100644 index 000000000..688d249bb --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/_dummy_thread.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils 
import PY3 + +if PY3: + from _dummy_thread import * +else: + __future_module__ = True + from dummy_thread import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/_markupbase.py b/.install/.kodi/addons/script.module.future/libs/future/moves/_markupbase.py new file mode 100644 index 000000000..f9fb4bbf2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/_markupbase.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from _markupbase import * +else: + __future_module__ = True + from markupbase import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/_thread.py b/.install/.kodi/addons/script.module.future/libs/future/moves/_thread.py new file mode 100644 index 000000000..c68018bb1 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/_thread.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from _thread import * +else: + __future_module__ = True + from thread import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/builtins.py b/.install/.kodi/addons/script.module.future/libs/future/moves/builtins.py new file mode 100644 index 000000000..e4b6221d5 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/builtins.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from builtins import * +else: + __future_module__ = True + from __builtin__ import * + # Overwrite any old definitions with the equivalent future.builtins ones: + from future.builtins import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/collections.py b/.install/.kodi/addons/script.module.future/libs/future/moves/collections.py new file mode 100644 index 000000000..664ee6a3d --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/collections.py @@ 
-0,0 +1,18 @@ +from __future__ import absolute_import +import sys + +from future.utils import PY2, PY26 +__future_module__ = True + +from collections import * + +if PY2: + from UserDict import UserDict + from UserList import UserList + from UserString import UserString + +if PY26: + from future.backports.misc import OrderedDict, Counter + +if sys.version_info < (3, 3): + from future.backports.misc import ChainMap, _count_elements diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/configparser.py b/.install/.kodi/addons/script.module.future/libs/future/moves/configparser.py new file mode 100644 index 000000000..33d9cf953 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/configparser.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + +from future.utils import PY2 + +if PY2: + from ConfigParser import * +else: + from configparser import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/copyreg.py b/.install/.kodi/addons/script.module.future/libs/future/moves/copyreg.py new file mode 100644 index 000000000..21c7a42f2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/copyreg.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from copyreg import * +else: + __future_module__ = True + from copy_reg import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/__init__.py new file mode 100644 index 000000000..626b406f7 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/__init__.py @@ -0,0 +1,20 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from dbm import * +else: + __future_module__ = True + from whichdb import * + from anydbm import * + +# Py3.3's dbm/__init__.py imports ndbm but doesn't expose it via __all__. 
+# In case some (badly written) code depends on dbm.ndbm after import dbm, +# we simulate this: +if PY3: + from dbm import ndbm +else: + try: + from future.moves.dbm import ndbm + except ImportError: + ndbm = None diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/dumb.py b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/dumb.py new file mode 100644 index 000000000..528383f6d --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/dumb.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from dbm.dumb import * +else: + __future_module__ = True + from dumbdbm import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/gnu.py b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/gnu.py new file mode 100644 index 000000000..68ccf67b9 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/gnu.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from dbm.gnu import * +else: + __future_module__ = True + from gdbm import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/ndbm.py b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/ndbm.py new file mode 100644 index 000000000..8c6fff8ab --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/dbm/ndbm.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from dbm.ndbm import * +else: + __future_module__ = True + from dbm import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/html/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/html/__init__.py new file mode 100644 index 000000000..22ed6e7d2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/html/__init__.py @@ -0,0 +1,31 @@ +from 
__future__ import absolute_import +from future.utils import PY3 +__future_module__ = True + +if PY3: + from html import * +else: + # cgi.escape isn't good enough for the single Py3.3 html test to pass. + # Define it inline here instead. From the Py3.4 stdlib. Note that the + # html.escape() function from the Py3.3 stdlib is not suitable for use on + # Py2.x. + """ + General functions for HTML manipulation. + """ + + def escape(s, quote=True): + """ + Replace special characters "&", "<" and ">" to HTML-safe sequences. + If the optional flag quote is true (the default), the quotation mark + characters, both double quote (") and single quote (') characters are also + translated. + """ + s = s.replace("&", "&") # Must be done first! + s = s.replace("<", "<") + s = s.replace(">", ">") + if quote: + s = s.replace('"', """) + s = s.replace('\'', "'") + return s + + __all__ = ['escape'] diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/html/entities.py b/.install/.kodi/addons/script.module.future/libs/future/moves/html/entities.py new file mode 100644 index 000000000..56a886091 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/html/entities.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from html.entities import * +else: + __future_module__ = True + from htmlentitydefs import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/html/parser.py b/.install/.kodi/addons/script.module.future/libs/future/moves/html/parser.py new file mode 100644 index 000000000..a6115b59f --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/html/parser.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 +__future_module__ = True + +if PY3: + from html.parser import * +else: + from HTMLParser import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/http/__init__.py 
b/.install/.kodi/addons/script.module.future/libs/future/moves/http/__init__.py new file mode 100644 index 000000000..917b3d71a --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/http/__init__.py @@ -0,0 +1,4 @@ +from future.utils import PY3 + +if not PY3: + __future_module__ = True diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/http/client.py b/.install/.kodi/addons/script.module.future/libs/future/moves/http/client.py new file mode 100644 index 000000000..55f9c9c1a --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/http/client.py @@ -0,0 +1,8 @@ +from future.utils import PY3 + +if PY3: + from http.client import * +else: + from httplib import * + from httplib import HTTPMessage + __future_module__ = True diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/http/cookiejar.py b/.install/.kodi/addons/script.module.future/libs/future/moves/http/cookiejar.py new file mode 100644 index 000000000..ea00df772 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/http/cookiejar.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from http.cookiejar import * +else: + __future_module__ = True + from cookielib import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/http/cookies.py b/.install/.kodi/addons/script.module.future/libs/future/moves/http/cookies.py new file mode 100644 index 000000000..1b74fe2dd --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/http/cookies.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from http.cookies import * +else: + __future_module__ = True + from Cookie import * + from Cookie import Morsel # left out of __all__ on Py2.7! 
diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/http/server.py b/.install/.kodi/addons/script.module.future/libs/future/moves/http/server.py new file mode 100644 index 000000000..4e75cc1de --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/http/server.py @@ -0,0 +1,20 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from http.server import * +else: + __future_module__ = True + from BaseHTTPServer import * + from CGIHTTPServer import * + from SimpleHTTPServer import * + try: + from CGIHTTPServer import _url_collapse_path # needed for a test + except ImportError: + try: + # Python 2.7.0 to 2.7.3 + from CGIHTTPServer import ( + _url_collapse_path_split as _url_collapse_path) + except ImportError: + # Doesn't exist on Python 2.6.x. Ignore it. + pass diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/itertools.py b/.install/.kodi/addons/script.module.future/libs/future/moves/itertools.py new file mode 100644 index 000000000..e5eb20d5d --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/itertools.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + +from itertools import * +try: + zip_longest = izip_longest + filterfalse = ifilterfalse +except NameError: + pass diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/pickle.py b/.install/.kodi/addons/script.module.future/libs/future/moves/pickle.py new file mode 100644 index 000000000..c53d69392 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/pickle.py @@ -0,0 +1,11 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from pickle import * +else: + __future_module__ = True + try: + from cPickle import * + except ImportError: + from pickle import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/queue.py 
b/.install/.kodi/addons/script.module.future/libs/future/moves/queue.py new file mode 100644 index 000000000..1cb1437d7 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/queue.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from queue import * +else: + __future_module__ = True + from Queue import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/reprlib.py b/.install/.kodi/addons/script.module.future/libs/future/moves/reprlib.py new file mode 100644 index 000000000..a313a13a4 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/reprlib.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from reprlib import * +else: + __future_module__ = True + from repr import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/socketserver.py b/.install/.kodi/addons/script.module.future/libs/future/moves/socketserver.py new file mode 100644 index 000000000..062e0848d --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/socketserver.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from socketserver import * +else: + __future_module__ = True + from SocketServer import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/subprocess.py b/.install/.kodi/addons/script.module.future/libs/future/moves/subprocess.py new file mode 100644 index 000000000..43ffd2ac2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/subprocess.py @@ -0,0 +1,11 @@ +from __future__ import absolute_import +from future.utils import PY2, PY26 + +from subprocess import * + +if PY2: + __future_module__ = True + from commands import getoutput, getstatusoutput + +if PY26: + from future.backports.misc import check_output diff --git 
a/.install/.kodi/addons/script.module.future/libs/future/moves/sys.py b/.install/.kodi/addons/script.module.future/libs/future/moves/sys.py new file mode 100644 index 000000000..1293bcb07 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/sys.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + +from future.utils import PY2 + +from sys import * + +if PY2: + from __builtin__ import intern diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/test/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/test/__init__.py new file mode 100644 index 000000000..5cf428b6e --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/test/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if not PY3: + __future_module__ = True diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/test/support.py b/.install/.kodi/addons/script.module.future/libs/future/moves/test/support.py new file mode 100644 index 000000000..e9aa0f48f --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/test/support.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +from future.standard_library import suspend_hooks +from future.utils import PY3 + +if PY3: + from test.support import * +else: + __future_module__ = True + with suspend_hooks(): + from test.test_support import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/__init__.py new file mode 100644 index 000000000..e40829663 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/__init__.py @@ -0,0 +1,27 @@ +from __future__ import absolute_import +from future.utils import PY3 +__future_module__ = True + +if not PY3: + from Tkinter import * + from Tkinter import (_cnfmerge, _default_root, _flatten, + 
_support_default_root, _test, + _tkinter, _setit) + + try: # >= 2.7.4 + from Tkinter import (_join) + except ImportError: + pass + + try: # >= 2.7.4 + from Tkinter import (_stringify) + except ImportError: + pass + + try: # >= 2.7.9 + from Tkinter import (_splitdict) + except ImportError: + pass + +else: + from tkinter import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/colorchooser.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/colorchooser.py new file mode 100644 index 000000000..6dde6e8d3 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/colorchooser.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.colorchooser import * +else: + try: + from tkColorChooser import * + except ImportError: + raise ImportError('The tkColorChooser module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/commondialog.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/commondialog.py new file mode 100644 index 000000000..eb7ae8d60 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/commondialog.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.commondialog import * +else: + try: + from tkCommonDialog import * + except ImportError: + raise ImportError('The tkCommonDialog module is missing. 
Does your Py2 ' + 'installation include tkinter?') diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/constants.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/constants.py new file mode 100644 index 000000000..ffe098152 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/constants.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.constants import * +else: + try: + from Tkconstants import * + except ImportError: + raise ImportError('The Tkconstants module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/dialog.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/dialog.py new file mode 100644 index 000000000..113370ca2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/dialog.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.dialog import * +else: + try: + from Dialog import * + except ImportError: + raise ImportError('The Dialog module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/dnd.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/dnd.py new file mode 100644 index 000000000..1ab437917 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/dnd.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.dnd import * +else: + try: + from Tkdnd import * + except ImportError: + raise ImportError('The Tkdnd module is missing. 
Does your Py2 ' + 'installation include tkinter?') diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/filedialog.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/filedialog.py new file mode 100644 index 000000000..973923e2c --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/filedialog.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.filedialog import * +else: + try: + from FileDialog import * + except ImportError: + raise ImportError('The FileDialog module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/font.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/font.py new file mode 100644 index 000000000..628f399a3 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/font.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.font import * +else: + try: + from tkFont import * + except ImportError: + raise ImportError('The tkFont module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/messagebox.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/messagebox.py new file mode 100644 index 000000000..b43d8702f --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/messagebox.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.messagebox import * +else: + try: + from tkMessageBox import * + except ImportError: + raise ImportError('The tkMessageBox module is missing. 
Does your Py2 ' + 'installation include tkinter?') diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/scrolledtext.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/scrolledtext.py new file mode 100644 index 000000000..1c69db606 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/scrolledtext.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.scrolledtext import * +else: + try: + from ScrolledText import * + except ImportError: + raise ImportError('The ScrolledText module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/simpledialog.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/simpledialog.py new file mode 100644 index 000000000..dba93fbf2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/simpledialog.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.simpledialog import * +else: + try: + from SimpleDialog import * + except ImportError: + raise ImportError('The SimpleDialog module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/tix.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/tix.py new file mode 100644 index 000000000..8d1718ad0 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/tix.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.tix import * +else: + try: + from Tix import * + except ImportError: + raise ImportError('The Tix module is missing. 
Does your Py2 ' + 'installation include tkinter?') diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/ttk.py b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/ttk.py new file mode 100644 index 000000000..081c1b495 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/tkinter/ttk.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.ttk import * +else: + try: + from ttk import * + except ImportError: + raise ImportError('The ttk module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/__init__.py new file mode 100644 index 000000000..5cf428b6e --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if not PY3: + __future_module__ = True diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/error.py b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/error.py new file mode 100644 index 000000000..7d8ada73f --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/error.py @@ -0,0 +1,16 @@ +from __future__ import absolute_import +from future.standard_library import suspend_hooks + +from future.utils import PY3 + +if PY3: + from urllib.error import * +else: + __future_module__ = True + + # We use this method to get at the original Py2 urllib before any renaming magic + # ContentTooShortError = sys.py2_modules['urllib'].ContentTooShortError + + with suspend_hooks(): + from urllib import ContentTooShortError + from urllib2 import URLError, HTTPError diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/parse.py 
b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/parse.py new file mode 100644 index 000000000..9074b8163 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/parse.py @@ -0,0 +1,28 @@ +from __future__ import absolute_import +from future.standard_library import suspend_hooks + +from future.utils import PY3 + +if PY3: + from urllib.parse import * +else: + __future_module__ = True + from urlparse import (ParseResult, SplitResult, parse_qs, parse_qsl, + urldefrag, urljoin, urlparse, urlsplit, + urlunparse, urlunsplit) + + # we use this method to get at the original py2 urllib before any renaming + # quote = sys.py2_modules['urllib'].quote + # quote_plus = sys.py2_modules['urllib'].quote_plus + # unquote = sys.py2_modules['urllib'].unquote + # unquote_plus = sys.py2_modules['urllib'].unquote_plus + # urlencode = sys.py2_modules['urllib'].urlencode + # splitquery = sys.py2_modules['urllib'].splitquery + + with suspend_hooks(): + from urllib import (quote, + quote_plus, + unquote, + unquote_plus, + urlencode, + splitquery) diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/request.py b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/request.py new file mode 100644 index 000000000..60e440a77 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/request.py @@ -0,0 +1,93 @@ +from __future__ import absolute_import + +from future.standard_library import suspend_hooks +from future.utils import PY3 + +if PY3: + from urllib.request import * + # This aren't in __all__: + from urllib.request import (getproxies, + pathname2url, + proxy_bypass, + quote, + request_host, + splitattr, + splithost, + splitpasswd, + splitport, + splitquery, + splittag, + splittype, + splituser, + splitvalue, + thishost, + to_bytes, + unquote, + unwrap, + url2pathname, + urlcleanup, + urljoin, + urlopen, + urlparse, + urlretrieve, + urlsplit, + urlunparse) +else: + 
__future_module__ = True + with suspend_hooks(): + from urllib import * + from urllib2 import * + from urlparse import * + + # Rename: + from urllib import toBytes # missing from __all__ on Py2.6 + to_bytes = toBytes + + # from urllib import (pathname2url, + # url2pathname, + # getproxies, + # urlretrieve, + # urlcleanup, + # URLopener, + # FancyURLopener, + # proxy_bypass) + + # from urllib2 import ( + # AbstractBasicAuthHandler, + # AbstractDigestAuthHandler, + # BaseHandler, + # CacheFTPHandler, + # FileHandler, + # FTPHandler, + # HTTPBasicAuthHandler, + # HTTPCookieProcessor, + # HTTPDefaultErrorHandler, + # HTTPDigestAuthHandler, + # HTTPErrorProcessor, + # HTTPHandler, + # HTTPPasswordMgr, + # HTTPPasswordMgrWithDefaultRealm, + # HTTPRedirectHandler, + # HTTPSHandler, + # URLError, + # build_opener, + # install_opener, + # OpenerDirector, + # ProxyBasicAuthHandler, + # ProxyDigestAuthHandler, + # ProxyHandler, + # Request, + # UnknownHandler, + # urlopen, + # ) + + # from urlparse import ( + # urldefrag + # urljoin, + # urlparse, + # urlunparse, + # urlsplit, + # urlunsplit, + # parse_qs, + # parse_q" + # ) diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/response.py b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/response.py new file mode 100644 index 000000000..a287ae283 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/response.py @@ -0,0 +1,12 @@ +from future import standard_library +from future.utils import PY3 + +if PY3: + from urllib.response import * +else: + __future_module__ = True + with standard_library.suspend_hooks(): + from urllib import (addbase, + addclosehook, + addinfo, + addinfourl) diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/robotparser.py b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/robotparser.py new file mode 100644 index 000000000..0dc8f5715 --- /dev/null +++ 
b/.install/.kodi/addons/script.module.future/libs/future/moves/urllib/robotparser.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from urllib.robotparser import * +else: + __future_module__ = True + from robotparser import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/winreg.py b/.install/.kodi/addons/script.module.future/libs/future/moves/winreg.py new file mode 100644 index 000000000..c8b147568 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/winreg.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from winreg import * +else: + __future_module__ = True + from _winreg import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/client.py b/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/client.py new file mode 100644 index 000000000..4708cf899 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/client.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from xmlrpc.client import * +else: + from xmlrpclib import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/server.py b/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/server.py new file mode 100644 index 000000000..1a8af3454 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/moves/xmlrpc/server.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from xmlrpc.server import * +else: + from xmlrpclib import * diff --git 
a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_dummy_thread/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_dummy_thread/__init__.py new file mode 100644 index 000000000..63dced6e5 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_dummy_thread/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +import sys +__future_module__ = True + +if sys.version_info[0] < 3: + from dummy_thread import * +else: + raise ImportError('This package should not be accessible on Python 3. ' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_markupbase/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_markupbase/__init__.py new file mode 100644 index 000000000..290906540 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_markupbase/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +import sys +__future_module__ = True + +if sys.version_info[0] < 3: + from markupbase import * +else: + raise ImportError('This package should not be accessible on Python 3. 
' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_thread/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_thread/__init__.py new file mode 100644 index 000000000..9f2a51c75 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/_thread/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +import sys +__future_module__ = True + +if sys.version_info[0] < 3: + from thread import * +else: + raise ImportError('This package should not be accessible on Python 3. ' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/copyreg/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/copyreg/__init__.py new file mode 100644 index 000000000..51bd4b9a7 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/copyreg/__init__.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import +import sys + +if sys.version_info[0] < 3: + from copy_reg import * +else: + raise ImportError('This package should not be accessible on Python 3. 
' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/__init__.py new file mode 100644 index 000000000..e957e7457 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/__init__.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import +import sys + +if sys.version_info[0] < 3: + from future.moves.html import * +else: + raise ImportError('This package should not be accessible on Python 3. ' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/entities.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/entities.py new file mode 100644 index 000000000..211649e53 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/entities.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from html.entities import * +else: + from future.moves.html.entities import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/parser.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/parser.py new file mode 100644 index 000000000..541def391 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/html/parser.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +import sys +__future_module__ = True + +if sys.version_info[0] == 3: + raise ImportError('Cannot import module from python-future source folder') +else: + from future.moves.html.parser import * diff --git 
a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/__init__.py new file mode 100644 index 000000000..e4f853e53 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/__init__.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import +import sys + +if sys.version_info[0] < 3: + pass +else: + raise ImportError('This package should not be accessible on Python 3. ' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/client.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/client.py new file mode 100644 index 000000000..7566fe4dc --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/client.py @@ -0,0 +1,91 @@ +from __future__ import absolute_import +import sys + +assert sys.version_info[0] < 3 + +from httplib import * +from httplib import HTTPMessage + +# These constants aren't included in __all__ in httplib.py: + +from httplib import (HTTP_PORT, + HTTPS_PORT, + + _CS_IDLE, + _CS_REQ_STARTED, + _CS_REQ_SENT, + + CONTINUE, + SWITCHING_PROTOCOLS, + PROCESSING, + + OK, + CREATED, + ACCEPTED, + NON_AUTHORITATIVE_INFORMATION, + NO_CONTENT, + RESET_CONTENT, + PARTIAL_CONTENT, + MULTI_STATUS, + IM_USED, + + MULTIPLE_CHOICES, + MOVED_PERMANENTLY, + FOUND, + SEE_OTHER, + NOT_MODIFIED, + USE_PROXY, + TEMPORARY_REDIRECT, + + BAD_REQUEST, + UNAUTHORIZED, + PAYMENT_REQUIRED, + FORBIDDEN, + NOT_FOUND, + METHOD_NOT_ALLOWED, + NOT_ACCEPTABLE, + PROXY_AUTHENTICATION_REQUIRED, + REQUEST_TIMEOUT, + CONFLICT, + GONE, + LENGTH_REQUIRED, + PRECONDITION_FAILED, + REQUEST_ENTITY_TOO_LARGE, + REQUEST_URI_TOO_LONG, + UNSUPPORTED_MEDIA_TYPE, + REQUESTED_RANGE_NOT_SATISFIABLE, + 
EXPECTATION_FAILED, + UNPROCESSABLE_ENTITY, + LOCKED, + FAILED_DEPENDENCY, + UPGRADE_REQUIRED, + + INTERNAL_SERVER_ERROR, + NOT_IMPLEMENTED, + BAD_GATEWAY, + SERVICE_UNAVAILABLE, + GATEWAY_TIMEOUT, + HTTP_VERSION_NOT_SUPPORTED, + INSUFFICIENT_STORAGE, + NOT_EXTENDED, + + MAXAMOUNT, + ) + +# These are not available on Python 2.6.x: +try: + from httplib import LineTooLong, LineAndFileWrapper +except ImportError: + pass + +# These may not be available on all versions of Python 2.6.x or 2.7.x +try: + from httplib import ( + _MAXLINE, + _MAXHEADERS, + _is_legal_header_name, + _is_illegal_header_value, + _METHODS_EXPECTING_BODY + ) +except ImportError: + pass diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/cookiejar.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/cookiejar.py new file mode 100644 index 000000000..d847b2bf2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/cookiejar.py @@ -0,0 +1,6 @@ +from __future__ import absolute_import +import sys + +assert sys.version_info[0] < 3 + +from cookielib import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/cookies.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/cookies.py new file mode 100644 index 000000000..eb2a82388 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/cookies.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import +import sys + +assert sys.version_info[0] < 3 + +from Cookie import * +from Cookie import Morsel # left out of __all__ on Py2.7! 
diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/server.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/server.py new file mode 100644 index 000000000..297105578 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/http/server.py @@ -0,0 +1,18 @@ +from __future__ import absolute_import +import sys + +assert sys.version_info[0] < 3 + +from BaseHTTPServer import * +from CGIHTTPServer import * +from SimpleHTTPServer import * +try: + from CGIHTTPServer import _url_collapse_path # needed for a test +except ImportError: + try: + # Python 2.7.0 to 2.7.3 + from CGIHTTPServer import ( + _url_collapse_path_split as _url_collapse_path) + except ImportError: + # Doesn't exist on Python 2.6.x. Ignore it. + pass diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/queue/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/queue/__init__.py new file mode 100644 index 000000000..22bd296b6 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/queue/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +import sys +__future_module__ = True + +if sys.version_info[0] < 3: + from Queue import * +else: + raise ImportError('This package should not be accessible on Python 3. 
' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/reprlib/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/reprlib/__init__.py new file mode 100644 index 000000000..6ccf9c006 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/reprlib/__init__.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import +import sys + +if sys.version_info[0] < 3: + from repr import * +else: + raise ImportError('This package should not be accessible on Python 3. ' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/socketserver/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/socketserver/__init__.py new file mode 100644 index 000000000..c5b8c9c28 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/socketserver/__init__.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import +import sys + +if sys.version_info[0] < 3: + from SocketServer import * +else: + raise ImportError('This package should not be accessible on Python 3. 
' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/winreg/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/winreg/__init__.py new file mode 100644 index 000000000..97243bbb8 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/winreg/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +import sys +__future_module__ = True + +if sys.version_info[0] < 3: + from _winreg import * +else: + raise ImportError('This package should not be accessible on Python 3. ' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/__init__.py new file mode 100644 index 000000000..e4f853e53 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/__init__.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import +import sys + +if sys.version_info[0] < 3: + pass +else: + raise ImportError('This package should not be accessible on Python 3. 
' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/client.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/client.py new file mode 100644 index 000000000..a8d0827e9 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/client.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import +import sys + +assert sys.version_info[0] < 3 +from xmlrpclib import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/server.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/server.py new file mode 100644 index 000000000..a8d0827e9 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/Lib/xmlrpc/server.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import +import sys + +assert sys.version_info[0] < 3 +from xmlrpclib import * diff --git a/.install/.kodi/addons/script.module.future/libs/future/standard_library/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/standard_library/__init__.py new file mode 100644 index 000000000..e64568f2f --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/standard_library/__init__.py @@ -0,0 +1,826 @@ +""" +Python 3 reorganized the standard library (PEP 3108). This module exposes +several standard library modules to Python 2 under their new Python 3 +names. 
+ +It is designed to be used as follows:: + + from future import standard_library + standard_library.install_aliases() + +And then these normal Py3 imports work on both Py3 and Py2:: + + import builtins + import copyreg + import queue + import reprlib + import socketserver + import winreg # on Windows only + import test.support + import html, html.parser, html.entites + import http, http.client, http.server + import http.cookies, http.cookiejar + import urllib.parse, urllib.request, urllib.response, urllib.error, urllib.robotparser + import xmlrpc.client, xmlrpc.server + + import _thread + import _dummy_thread + import _markupbase + + from itertools import filterfalse, zip_longest + from sys import intern + from collections import UserDict, UserList, UserString + from collections import OrderedDict, Counter, ChainMap # even on Py2.6 + from subprocess import getoutput, getstatusoutput + from subprocess import check_output # even on Py2.6 + +(The renamed modules and functions are still available under their old +names on Python 2.) + +This is a cleaner alternative to this idiom (see +http://docs.pythonsprints.com/python3_porting/py-porting.html):: + + try: + import queue + except ImportError: + import Queue as queue + + +Limitations +----------- +We don't currently support these modules, but would like to:: + + import dbm + import dbm.dumb + import dbm.gnu + import collections.abc # on Py33 + import pickle # should (optionally) bring in cPickle on Python 2 + +""" + +from __future__ import absolute_import, division, print_function + +import sys +import logging +import imp +import contextlib +import types +import copy +import os + +# Make a dedicated logger; leave the root logger to be configured +# by the application. 
+flog = logging.getLogger('future_stdlib') +_formatter = logging.Formatter(logging.BASIC_FORMAT) +_handler = logging.StreamHandler() +_handler.setFormatter(_formatter) +flog.addHandler(_handler) +flog.setLevel(logging.WARN) + +from future.utils import PY2, PY3 + +# Added by Roman V.M. for using in Kodi +if PY2: + from xbmcaddon import Addon + sys.path.append(os.path.join( + Addon('script.module.future').getAddonInfo('path').decode('utf-8'), + 'libs', + 'future', + 'standard_library', + 'Lib') + ) + +# The modules that are defined under the same names on Py3 but with +# different contents in a significant way (e.g. submodules) are: +# pickle (fast one) +# dbm +# urllib +# test +# email + +REPLACED_MODULES = set(['test', 'urllib', 'pickle', 'dbm']) # add email and dbm when we support it + +# The following module names are not present in Python 2.x, so they cause no +# potential clashes between the old and new names: +# http +# html +# tkinter +# xmlrpc +# Keys: Py2 / real module names +# Values: Py3 / simulated module names +RENAMES = { + # 'cStringIO': 'io', # there's a new io module in Python 2.6 + # that provides StringIO and BytesIO + # 'StringIO': 'io', # ditto + # 'cPickle': 'pickle', + '__builtin__': 'builtins', + 'copy_reg': 'copyreg', + 'Queue': 'queue', + 'future.moves.socketserver': 'socketserver', + 'ConfigParser': 'configparser', + 'repr': 'reprlib', + # 'FileDialog': 'tkinter.filedialog', + # 'tkFileDialog': 'tkinter.filedialog', + # 'SimpleDialog': 'tkinter.simpledialog', + # 'tkSimpleDialog': 'tkinter.simpledialog', + # 'tkColorChooser': 'tkinter.colorchooser', + # 'tkCommonDialog': 'tkinter.commondialog', + # 'Dialog': 'tkinter.dialog', + # 'Tkdnd': 'tkinter.dnd', + # 'tkFont': 'tkinter.font', + # 'tkMessageBox': 'tkinter.messagebox', + # 'ScrolledText': 'tkinter.scrolledtext', + # 'Tkconstants': 'tkinter.constants', + # 'Tix': 'tkinter.tix', + # 'ttk': 'tkinter.ttk', + # 'Tkinter': 'tkinter', + '_winreg': 'winreg', + 'thread': '_thread', + 
'dummy_thread': '_dummy_thread', + # 'anydbm': 'dbm', # causes infinite import loop + # 'whichdb': 'dbm', # causes infinite import loop + # anydbm and whichdb are handled by fix_imports2 + # 'dbhash': 'dbm.bsd', + # 'dumbdbm': 'dbm.dumb', + # 'dbm': 'dbm.ndbm', + # 'gdbm': 'dbm.gnu', + 'future.moves.xmlrpc': 'xmlrpc', + # 'future.backports.email': 'email', # for use by urllib + # 'DocXMLRPCServer': 'xmlrpc.server', + # 'SimpleXMLRPCServer': 'xmlrpc.server', + # 'httplib': 'http.client', + # 'htmlentitydefs' : 'html.entities', + # 'HTMLParser' : 'html.parser', + # 'Cookie': 'http.cookies', + # 'cookielib': 'http.cookiejar', + # 'BaseHTTPServer': 'http.server', + # 'SimpleHTTPServer': 'http.server', + # 'CGIHTTPServer': 'http.server', + # 'future.backports.test': 'test', # primarily for renaming test_support to support + # 'commands': 'subprocess', + # 'urlparse' : 'urllib.parse', + # 'robotparser' : 'urllib.robotparser', + # 'abc': 'collections.abc', # for Py33 + # 'future.utils.six.moves.html': 'html', + # 'future.utils.six.moves.http': 'http', + 'future.moves.html': 'html', + 'future.moves.http': 'http', + # 'future.backports.urllib': 'urllib', + # 'future.utils.six.moves.urllib': 'urllib', + 'future.moves._markupbase': '_markupbase', + } + + +# It is complicated and apparently brittle to mess around with the +# ``sys.modules`` cache in order to support "import urllib" meaning two +# different things (Py2.7 urllib and backported Py3.3-like urllib) in different +# contexts. So we require explicit imports for these modules. +assert len(set(RENAMES.values()) & set(REPLACED_MODULES)) == 0 + + +# Harmless renames that we can insert. 
+# These modules need names from elsewhere being added to them: +# subprocess: should provide getoutput and other fns from commands +# module but these fns are missing: getstatus, mk2arg, +# mkarg +# re: needs an ASCII constant that works compatibly with Py3 + +# etc: see lib2to3/fixes/fix_imports.py + +# (New module name, new object name, old module name, old object name) +MOVES = [('collections', 'UserList', 'UserList', 'UserList'), + ('collections', 'UserDict', 'UserDict', 'UserDict'), + ('collections', 'UserString','UserString', 'UserString'), + ('collections', 'ChainMap', 'future.backports.misc', 'ChainMap'), + ('itertools', 'filterfalse','itertools', 'ifilterfalse'), + ('itertools', 'zip_longest','itertools', 'izip_longest'), + ('sys', 'intern','__builtin__', 'intern'), + # The re module has no ASCII flag in Py2, but this is the default. + # Set re.ASCII to a zero constant. stat.ST_MODE just happens to be one + # (and it exists on Py2.6+). + ('re', 'ASCII','stat', 'ST_MODE'), + ('base64', 'encodebytes','base64', 'encodestring'), + ('base64', 'decodebytes','base64', 'decodestring'), + ('subprocess', 'getoutput', 'commands', 'getoutput'), + ('subprocess', 'getstatusoutput', 'commands', 'getstatusoutput'), + ('subprocess', 'check_output', 'future.backports.misc', 'check_output'), + ('math', 'ceil', 'future.backports.misc', 'ceil'), + ('collections', 'OrderedDict', 'future.backports.misc', 'OrderedDict'), + ('collections', 'Counter', 'future.backports.misc', 'Counter'), + ('collections', 'ChainMap', 'future.backports.misc', 'ChainMap'), + ('itertools', 'count', 'future.backports.misc', 'count'), + ('reprlib', 'recursive_repr', 'future.backports.misc', 'recursive_repr'), + ('functools', 'cmp_to_key', 'future.backports.misc', 'cmp_to_key'), + +# This is no use, since "import urllib.request" etc. 
still fails: +# ('urllib', 'error', 'future.moves.urllib', 'error'), +# ('urllib', 'parse', 'future.moves.urllib', 'parse'), +# ('urllib', 'request', 'future.moves.urllib', 'request'), +# ('urllib', 'response', 'future.moves.urllib', 'response'), +# ('urllib', 'robotparser', 'future.moves.urllib', 'robotparser'), + ] + + +# A minimal example of an import hook: +# class WarnOnImport(object): +# def __init__(self, *args): +# self.module_names = args +# +# def find_module(self, fullname, path=None): +# if fullname in self.module_names: +# self.path = path +# return self +# return None +# +# def load_module(self, name): +# if name in sys.modules: +# return sys.modules[name] +# module_info = imp.find_module(name, self.path) +# module = imp.load_module(name, *module_info) +# sys.modules[name] = module +# flog.warning("Imported deprecated module %s", name) +# return module + + +class RenameImport(object): + """ + A class for import hooks mapping Py3 module names etc. to the Py2 equivalents. + """ + # Different RenameImport classes are created when importing this module from + # different source files. This causes isinstance(hook, RenameImport) checks + # to produce inconsistent results. We add this RENAMER attribute here so + # remove_hooks() and install_hooks() can find instances of these classes + # easily: + RENAMER = True + + def __init__(self, old_to_new): + ''' + Pass in a dictionary-like object mapping from old names to new + names. E.g. 
{'ConfigParser': 'configparser', 'cPickle': 'pickle'} + ''' + self.old_to_new = old_to_new + both = set(old_to_new.keys()) & set(old_to_new.values()) + assert (len(both) == 0 and + len(set(old_to_new.values())) == len(old_to_new.values())), \ + 'Ambiguity in renaming (handler not implemented)' + self.new_to_old = dict((new, old) for (old, new) in old_to_new.items()) + + def find_module(self, fullname, path=None): + # Handles hierarchical importing: package.module.module2 + new_base_names = set([s.split('.')[0] for s in self.new_to_old]) + # Before v0.12: Was: if fullname in set(self.old_to_new) | new_base_names: + if fullname in new_base_names: + return self + return None + + def load_module(self, name): + path = None + if name in sys.modules: + return sys.modules[name] + elif name in self.new_to_old: + # New name. Look up the corresponding old (Py2) name: + oldname = self.new_to_old[name] + module = self._find_and_load_module(oldname) + # module.__future_module__ = True + else: + module = self._find_and_load_module(name) + # In any case, make it available under the requested (Py3) name + sys.modules[name] = module + return module + + def _find_and_load_module(self, name, path=None): + """ + Finds and loads it. But if there's a . in the name, handles it + properly. + """ + bits = name.split('.') + while len(bits) > 1: + # Treat the first bit as a package + packagename = bits.pop(0) + package = self._find_and_load_module(packagename, path) + try: + path = package.__path__ + except AttributeError: + # This could be e.g. moves. + flog.debug('Package {0} has no __path__.'.format(package)) + if name in sys.modules: + return sys.modules[name] + flog.debug('What to do here?') + + name = bits[0] + module_info = imp.find_module(name, path) + return imp.load_module(name, *module_info) + + +class hooks(object): + """ + Acts as a context manager. Saves the state of sys.modules and restores it + after the 'with' block. 
+ + Use like this: + + >>> from future import standard_library + >>> with standard_library.hooks(): + ... import http.client + >>> import requests + + For this to work, http.client will be scrubbed from sys.modules after the + 'with' block. That way the modules imported in the 'with' block will + continue to be accessible in the current namespace but not from any + imported modules (like requests). + """ + def __enter__(self): + # flog.debug('Entering hooks context manager') + self.old_sys_modules = copy.copy(sys.modules) + self.hooks_were_installed = detect_hooks() + # self.scrubbed = scrub_py2_sys_modules() + install_hooks() + return self + + def __exit__(self, *args): + # flog.debug('Exiting hooks context manager') + # restore_sys_modules(self.scrubbed) + if not self.hooks_were_installed: + remove_hooks() + # scrub_future_sys_modules() + +# Sanity check for is_py2_stdlib_module(): We aren't replacing any +# builtin modules names: +if PY2: + assert len(set(RENAMES.values()) & set(sys.builtin_module_names)) == 0 + + +def is_py2_stdlib_module(m): + """ + Tries to infer whether the module m is from the Python 2 standard library. + This may not be reliable on all systems. + """ + if PY3: + return False + if not 'stdlib_path' in is_py2_stdlib_module.__dict__: + stdlib_files = [contextlib.__file__, os.__file__, copy.__file__] + stdlib_paths = [os.path.split(f)[0] for f in stdlib_files] + if not len(set(stdlib_paths)) == 1: + # This seems to happen on travis-ci.org. Very strange. We'll try to + # ignore it. 
+ flog.warn('Multiple locations found for the Python standard ' + 'library: %s' % stdlib_paths) + # Choose the first one arbitrarily + is_py2_stdlib_module.stdlib_path = stdlib_paths[0] + + if m.__name__ in sys.builtin_module_names: + return True + + if hasattr(m, '__file__'): + modpath = os.path.split(m.__file__) + if (modpath[0].startswith(is_py2_stdlib_module.stdlib_path) and + 'site-packages' not in modpath[0]): + return True + + return False + + +def scrub_py2_sys_modules(): + """ + Removes any Python 2 standard library modules from ``sys.modules`` that + would interfere with Py3-style imports using import hooks. Examples are + modules with the same names (like urllib or email). + + (Note that currently import hooks are disabled for modules like these + with ambiguous names anyway ...) + """ + if PY3: + return {} + scrubbed = {} + for modulename in REPLACED_MODULES & set(RENAMES.keys()): + if not modulename in sys.modules: + continue + + module = sys.modules[modulename] + + if is_py2_stdlib_module(module): + flog.debug('Deleting (Py2) {} from sys.modules'.format(modulename)) + scrubbed[modulename] = sys.modules[modulename] + del sys.modules[modulename] + return scrubbed + + +def scrub_future_sys_modules(): + """ + Deprecated. + """ + return {} + +class suspend_hooks(object): + """ + Acts as a context manager. Use like this: + + >>> from future import standard_library + >>> standard_library.install_hooks() + >>> import http.client + >>> # ... + >>> with standard_library.suspend_hooks(): + >>> import requests # incompatible with ``future``'s standard library hooks + + If the hooks were disabled before the context, they are not installed when + the context is left. 
+ """ + def __enter__(self): + self.hooks_were_installed = detect_hooks() + remove_hooks() + # self.scrubbed = scrub_future_sys_modules() + return self + + def __exit__(self, *args): + if self.hooks_were_installed: + install_hooks() + # restore_sys_modules(self.scrubbed) + + +def restore_sys_modules(scrubbed): + """ + Add any previously scrubbed modules back to the sys.modules cache, + but only if it's safe to do so. + """ + clash = set(sys.modules) & set(scrubbed) + if len(clash) != 0: + # If several, choose one arbitrarily to raise an exception about + first = list(clash)[0] + raise ImportError('future module {} clashes with Py2 module' + .format(first)) + sys.modules.update(scrubbed) + + +def install_aliases(): + """ + Monkey-patches the standard library in Py2.6/7 to provide + aliases for better Py3 compatibility. + """ + if PY3: + return + # if hasattr(install_aliases, 'run_already'): + # return + for (newmodname, newobjname, oldmodname, oldobjname) in MOVES: + __import__(newmodname) + # We look up the module in sys.modules because __import__ just returns the + # top-level package: + newmod = sys.modules[newmodname] + # newmod.__future_module__ = True + + __import__(oldmodname) + oldmod = sys.modules[oldmodname] + + obj = getattr(oldmod, oldobjname) + setattr(newmod, newobjname, obj) + + # Hack for urllib so it appears to have the same structure on Py2 as on Py3 + import urllib + from future.backports.urllib import request + from future.backports.urllib import response + from future.backports.urllib import parse + from future.backports.urllib import error + from future.backports.urllib import robotparser + urllib.request = request + urllib.response = response + urllib.parse = parse + urllib.error = error + urllib.robotparser = robotparser + sys.modules['urllib.request'] = request + sys.modules['urllib.response'] = response + sys.modules['urllib.parse'] = parse + sys.modules['urllib.error'] = error + sys.modules['urllib.robotparser'] = robotparser + + # Patch 
the test module so it appears to have the same structure on Py2 as on Py3 + try: + import test + except ImportError: + pass + try: + from future.moves.test import support + except ImportError: + pass + else: + test.support = support + sys.modules['test.support'] = support + + # Patch the dbm module so it appears to have the same structure on Py2 as on Py3 + try: + import dbm + except ImportError: + pass + else: + from future.moves.dbm import dumb + dbm.dumb = dumb + sys.modules['dbm.dumb'] = dumb + try: + from future.moves.dbm import gnu + except ImportError: + pass + else: + dbm.gnu = gnu + sys.modules['dbm.gnu'] = gnu + try: + from future.moves.dbm import ndbm + except ImportError: + pass + else: + dbm.ndbm = ndbm + sys.modules['dbm.ndbm'] = ndbm + + # install_aliases.run_already = True + + +def install_hooks(): + """ + This function installs the future.standard_library import hook into + sys.meta_path. + """ + if PY3: + return + + install_aliases() + + flog.debug('sys.meta_path was: {0}'.format(sys.meta_path)) + flog.debug('Installing hooks ...') + + # Add it unless it's there already + newhook = RenameImport(RENAMES) + if not detect_hooks(): + sys.meta_path.append(newhook) + flog.debug('sys.meta_path is now: {0}'.format(sys.meta_path)) + + +def enable_hooks(): + """ + Deprecated. Use install_hooks() instead. This will be removed by + ``future`` v1.0. + """ + install_hooks() + + +def remove_hooks(scrub_sys_modules=False): + """ + This function removes the import hook from sys.meta_path. + """ + if PY3: + return + flog.debug('Uninstalling hooks ...') + # Loop backwards, so deleting items keeps the ordering: + for i, hook in list(enumerate(sys.meta_path))[::-1]: + if hasattr(hook, 'RENAMER'): + del sys.meta_path[i] + + # Explicit is better than implicit. In the future the interface should + # probably change so that scrubbing the import hooks requires a separate + # function call. Left as is for now for backward compatibility with + # v0.11.x. 
+ if scrub_sys_modules: + scrub_future_sys_modules() + + +def disable_hooks(): + """ + Deprecated. Use remove_hooks() instead. This will be removed by + ``future`` v1.0. + """ + remove_hooks() + + +def detect_hooks(): + """ + Returns True if the import hooks are installed, False if not. + """ + flog.debug('Detecting hooks ...') + present = any([hasattr(hook, 'RENAMER') for hook in sys.meta_path]) + if present: + flog.debug('Detected.') + else: + flog.debug('Not detected.') + return present + + +# As of v0.12, this no longer happens implicitly: +# if not PY3: +# install_hooks() + + +if not hasattr(sys, 'py2_modules'): + sys.py2_modules = {} + +def cache_py2_modules(): + """ + Currently this function is unneeded, as we are not attempting to provide import hooks + for modules with ambiguous names: email, urllib, pickle. + """ + if len(sys.py2_modules) != 0: + return + assert not detect_hooks() + import urllib + sys.py2_modules['urllib'] = urllib + + import email + sys.py2_modules['email'] = email + + import pickle + sys.py2_modules['pickle'] = pickle + + # Not all Python installations have test module. (Anaconda doesn't, for example.) + # try: + # import test + # except ImportError: + # sys.py2_modules['test'] = None + # sys.py2_modules['test'] = test + + # import dbm + # sys.py2_modules['dbm'] = dbm + + +def import_(module_name, backport=False): + """ + Pass a (potentially dotted) module name of a Python 3 standard library + module. This function imports the module compatibly on Py2 and Py3 and + returns the top-level module. + + Example use: + >>> http = import_('http.client') + >>> http = import_('http.server') + >>> urllib = import_('urllib.request') + + Then: + >>> conn = http.client.HTTPConnection(...) + >>> response = urllib.request.urlopen('http://mywebsite.com') + >>> # etc. 
+ + Use as follows: + >>> package_name = import_(module_name) + + On Py3, equivalent to this: + + >>> import module_name + + On Py2, equivalent to this if backport=False: + + >>> from future.moves import module_name + + or to this if backport=True: + + >>> from future.backports import module_name + + except that it also handles dotted module names such as ``http.client`` + The effect then is like this: + + >>> from future.backports import module + >>> from future.backports.module import submodule + >>> module.submodule = submodule + + Note that this would be a SyntaxError in Python: + + >>> from future.backports import http.client + + """ + # Python 2.6 doesn't have importlib in the stdlib, so it requires + # the backported ``importlib`` package from PyPI as a dependency to use + # this function: + import importlib + + if PY3: + return __import__(module_name) + else: + # client.blah = blah + # Then http.client = client + # etc. + if backport: + prefix = 'future.backports' + else: + prefix = 'future.moves' + parts = prefix.split('.') + module_name.split('.') + + modules = [] + for i, part in enumerate(parts): + sofar = '.'.join(parts[:i+1]) + modules.append(importlib.import_module(sofar)) + for i, part in reversed(list(enumerate(parts))): + if i == 0: + break + setattr(modules[i-1], part, modules[i]) + + # Return the next-most top-level module after future.backports / future.moves: + return modules[2] + + +def from_import(module_name, *symbol_names, **kwargs): + """ + Example use: + >>> HTTPConnection = from_import('http.client', 'HTTPConnection') + >>> HTTPServer = from_import('http.server', 'HTTPServer') + >>> urlopen, urlparse = from_import('urllib.request', 'urlopen', 'urlparse') + + Equivalent to this on Py3: + + >>> from module_name import symbol_names[0], symbol_names[1], ... + + and this on Py2: + + >>> from future.moves.module_name import symbol_names[0], ... + + or: + + >>> from future.backports.module_name import symbol_names[0], ... 
+ + except that it also handles dotted module names such as ``http.client``. + """ + + if PY3: + return __import__(module_name) + else: + if 'backport' in kwargs and bool(kwargs['backport']): + prefix = 'future.backports' + else: + prefix = 'future.moves' + parts = prefix.split('.') + module_name.split('.') + module = importlib.import_module(prefix + '.' + module_name) + output = [getattr(module, name) for name in symbol_names] + if len(output) == 1: + return output[0] + else: + return output + + +class exclude_local_folder_imports(object): + """ + A context-manager that prevents standard library modules like configparser + from being imported from the local python-future source folder on Py3. + + (This was need prior to v0.16.0 because the presence of a configparser + folder would otherwise have prevented setuptools from running on Py3. Maybe + it's not needed any more?) + """ + def __init__(self, *args): + assert len(args) > 0 + self.module_names = args + # Disallow dotted module names like http.client: + if any(['.' in m for m in self.module_names]): + raise NotImplementedError('Dotted module names are not supported') + + def __enter__(self): + self.old_sys_path = copy.copy(sys.path) + self.old_sys_modules = copy.copy(sys.modules) + if sys.version_info[0] < 3: + return + # The presence of all these indicates we've found our source folder, + # because `builtins` won't have been installed in site-packages by setup.py: + FUTURE_SOURCE_SUBFOLDERS = ['future', 'past', 'libfuturize', 'libpasteurize', 'builtins'] + + # Look for the future source folder: + for folder in self.old_sys_path: + if all([os.path.exists(os.path.join(folder, subfolder)) + for subfolder in FUTURE_SOURCE_SUBFOLDERS]): + # Found it. Remove it. 
+ sys.path.remove(folder) + + # Ensure we import the system module: + for m in self.module_names: + # Delete the module and any submodules from sys.modules: + # for key in list(sys.modules): + # if key == m or key.startswith(m + '.'): + # try: + # del sys.modules[key] + # except KeyError: + # pass + try: + module = __import__(m, level=0) + except ImportError: + # There's a problem importing the system module. E.g. the + # winreg module is not available except on Windows. + pass + + def __exit__(self, *args): + # Restore sys.path and sys.modules: + sys.path = self.old_sys_path + for m in set(self.old_sys_modules.keys()) - set(sys.modules.keys()): + sys.modules[m] = self.old_sys_modules[m] + +TOP_LEVEL_MODULES = ['builtins', + 'copyreg', + 'html', + 'http', + 'queue', + 'reprlib', + 'socketserver', + 'test', + 'tkinter', + 'winreg', + 'xmlrpc', + '_dummy_thread', + '_markupbase', + '_thread', + ] + +def import_top_level_modules(): + with exclude_local_folder_imports(*TOP_LEVEL_MODULES): + for m in TOP_LEVEL_MODULES: + try: + __import__(m) + except ImportError: # e.g. 
winreg + pass diff --git a/.install/.kodi/addons/script.module.future/libs/future/tests/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/.install/.kodi/addons/script.module.future/libs/future/tests/base.py b/.install/.kodi/addons/script.module.future/libs/future/tests/base.py new file mode 100644 index 000000000..9f4607b69 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/tests/base.py @@ -0,0 +1,531 @@ +from __future__ import print_function, absolute_import +import os +import tempfile +import unittest +import sys +import re +import warnings +import io +from textwrap import dedent + +from future.utils import bind_method, PY26, PY3, PY2, PY27 +from future.moves.subprocess import check_output, STDOUT, CalledProcessError + +if PY26: + import unittest2 as unittest + + +def reformat_code(code): + """ + Removes any leading \n and dedents. + """ + if code.startswith('\n'): + code = code[1:] + return dedent(code) + + +def order_future_lines(code): + """ + Returns the code block with any ``__future__`` import lines sorted, and + then any ``future`` import lines sorted, then any ``builtins`` import lines + sorted. + + This only sorts the lines within the expected blocks. + + See test_order_future_lines() for an example. 
+ """ + + # We need .splitlines(keepends=True), which doesn't exist on Py2, + # so we use this instead: + lines = code.split('\n') + + uufuture_line_numbers = [i for i, line in enumerate(lines) + if line.startswith('from __future__ import ')] + + future_line_numbers = [i for i, line in enumerate(lines) + if line.startswith('from future') + or line.startswith('from past')] + + builtins_line_numbers = [i for i, line in enumerate(lines) + if line.startswith('from builtins')] + + assert code.lstrip() == code, ('internal usage error: ' + 'dedent the code before calling order_future_lines()') + + def mymax(numbers): + return max(numbers) if len(numbers) > 0 else 0 + + def mymin(numbers): + return min(numbers) if len(numbers) > 0 else float('inf') + + assert mymax(uufuture_line_numbers) <= mymin(future_line_numbers), \ + 'the __future__ and future imports are out of order' + + # assert mymax(future_line_numbers) <= mymin(builtins_line_numbers), \ + # 'the future and builtins imports are out of order' + + uul = sorted([lines[i] for i in uufuture_line_numbers]) + sorted_uufuture_lines = dict(zip(uufuture_line_numbers, uul)) + + fl = sorted([lines[i] for i in future_line_numbers]) + sorted_future_lines = dict(zip(future_line_numbers, fl)) + + bl = sorted([lines[i] for i in builtins_line_numbers]) + sorted_builtins_lines = dict(zip(builtins_line_numbers, bl)) + + # Replace the old unsorted "from __future__ import ..." 
lines with the + # new sorted ones: + new_lines = [] + for i in range(len(lines)): + if i in uufuture_line_numbers: + new_lines.append(sorted_uufuture_lines[i]) + elif i in future_line_numbers: + new_lines.append(sorted_future_lines[i]) + elif i in builtins_line_numbers: + new_lines.append(sorted_builtins_lines[i]) + else: + new_lines.append(lines[i]) + return '\n'.join(new_lines) + + +class VerboseCalledProcessError(CalledProcessError): + """ + Like CalledProcessError, but it displays more information (message and + script output) for diagnosing test failures etc. + """ + def __init__(self, msg, returncode, cmd, output=None): + self.msg = msg + self.returncode = returncode + self.cmd = cmd + self.output = output + + def __str__(self): + return ("Command '%s' failed with exit status %d\nMessage: %s\nOutput: %s" + % (self.cmd, self.returncode, self.msg, self.output)) + +class FuturizeError(VerboseCalledProcessError): + pass + +class PasteurizeError(VerboseCalledProcessError): + pass + + +class CodeHandler(unittest.TestCase): + """ + Handy mixin for test classes for writing / reading / futurizing / + running .py files in the test suite. 
+ """ + def setUp(self): + """ + The outputs from the various futurize stages should have the + following headers: + """ + # After stage1: + # TODO: use this form after implementing a fixer to consolidate + # __future__ imports into a single line: + # self.headers1 = """ + # from __future__ import absolute_import, division, print_function + # """ + self.headers1 = reformat_code(""" + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + """) + + # After stage2 --all-imports: + # TODO: use this form after implementing a fixer to consolidate + # __future__ imports into a single line: + # self.headers2 = """ + # from __future__ import (absolute_import, division, + # print_function, unicode_literals) + # from future import standard_library + # from future.builtins import * + # """ + self.headers2 = reformat_code(""" + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + from __future__ import unicode_literals + from future import standard_library + standard_library.install_aliases() + from builtins import * + """) + self.interpreters = [sys.executable] + self.tempdir = tempfile.mkdtemp() + os.path.sep + pypath = os.getenv('PYTHONPATH') + if pypath: + self.env = {'PYTHONPATH': os.getcwd() + os.pathsep + pypath} + else: + self.env = {'PYTHONPATH': os.getcwd()} + + def convert(self, code, stages=(1, 2), all_imports=False, from3=False, + reformat=True, run=True, conservative=False): + """ + Converts the code block using ``futurize`` and returns the + resulting code. + + Passing stages=[1] or stages=[2] passes the flag ``--stage1`` or + ``stage2`` to ``futurize``. Passing both stages runs ``futurize`` + with both stages by default. + + If from3 is False, runs ``futurize``, converting from Python 2 to + both 2 and 3. If from3 is True, runs ``pasteurize`` to convert + from Python 3 to both 2 and 3. 
+ + Optionally reformats the code block first using the reformat() function. + + If run is True, runs the resulting code under all Python + interpreters in self.interpreters. + """ + if reformat: + code = reformat_code(code) + self._write_test_script(code) + self._futurize_test_script(stages=stages, all_imports=all_imports, + from3=from3, conservative=conservative) + output = self._read_test_script() + if run: + for interpreter in self.interpreters: + _ = self._run_test_script(interpreter=interpreter) + return output + + def compare(self, output, expected, ignore_imports=True): + """ + Compares whether the code blocks are equal. If not, raises an + exception so the test fails. Ignores any trailing whitespace like + blank lines. + + If ignore_imports is True, passes the code blocks into the + strip_future_imports method. + + If one code block is a unicode string and the other a + byte-string, it assumes the byte-string is encoded as utf-8. + """ + if ignore_imports: + output = self.strip_future_imports(output) + expected = self.strip_future_imports(expected) + if isinstance(output, bytes) and not isinstance(expected, bytes): + output = output.decode('utf-8') + if isinstance(expected, bytes) and not isinstance(output, bytes): + expected = expected.decode('utf-8') + self.assertEqual(order_future_lines(output.rstrip()), + expected.rstrip()) + + def strip_future_imports(self, code): + """ + Strips any of these import lines: + + from __future__ import + from future + from future. 
+ from builtins + + or any line containing: + install_hooks() + or: + install_aliases() + + Limitation: doesn't handle imports split across multiple lines like + this: + + from __future__ import (absolute_import, division, print_function, + unicode_literals) + """ + output = [] + # We need .splitlines(keepends=True), which doesn't exist on Py2, + # so we use this instead: + for line in code.split('\n'): + if not (line.startswith('from __future__ import ') + or line.startswith('from future ') + or line.startswith('from builtins ') + or 'install_hooks()' in line + or 'install_aliases()' in line + # but don't match "from future_builtins" :) + or line.startswith('from future.')): + output.append(line) + return '\n'.join(output) + + def convert_check(self, before, expected, stages=(1, 2), all_imports=False, + ignore_imports=True, from3=False, run=True, + conservative=False): + """ + Convenience method that calls convert() and compare(). + + Reformats the code blocks automatically using the reformat_code() + function. + + If all_imports is passed, we add the appropriate import headers + for the stage(s) selected to the ``expected`` code-block, so they + needn't appear repeatedly in the test code. + + If ignore_imports is True, ignores the presence of any lines + beginning: + + from __future__ import ... + from future import ... + + for the purpose of the comparison. + """ + output = self.convert(before, stages=stages, all_imports=all_imports, + from3=from3, run=run, conservative=conservative) + if all_imports: + headers = self.headers2 if 2 in stages else self.headers1 + else: + headers = '' + + self.compare(output, headers + reformat_code(expected), + ignore_imports=ignore_imports) + + def unchanged(self, code, **kwargs): + """ + Convenience method to ensure the code is unchanged by the + futurize process. 
+ """ + self.convert_check(code, code, **kwargs) + + def _write_test_script(self, code, filename='mytestscript.py'): + """ + Dedents the given code (a multiline string) and writes it out to + a file in a temporary folder like /tmp/tmpUDCn7x/mytestscript.py. + """ + if isinstance(code, bytes): + code = code.decode('utf-8') + # Be explicit about encoding the temp file as UTF-8 (issue #63): + with io.open(self.tempdir + filename, 'wt', encoding='utf-8') as f: + f.write(dedent(code)) + + def _read_test_script(self, filename='mytestscript.py'): + with io.open(self.tempdir + filename, 'rt', encoding='utf-8') as f: + newsource = f.read() + return newsource + + def _futurize_test_script(self, filename='mytestscript.py', stages=(1, 2), + all_imports=False, from3=False, + conservative=False): + params = [] + stages = list(stages) + if all_imports: + params.append('--all-imports') + if from3: + script = 'pasteurize.py' + else: + script = 'futurize.py' + if stages == [1]: + params.append('--stage1') + elif stages == [2]: + params.append('--stage2') + else: + assert stages == [1, 2] + if conservative: + params.append('--conservative') + # No extra params needed + + # Absolute file path: + fn = self.tempdir + filename + call_args = [sys.executable, script] + params + ['-w', fn] + try: + output = check_output(call_args, stderr=STDOUT, env=self.env) + except CalledProcessError as e: + with open(fn) as f: + msg = ( + 'Error running the command %s\n' + '%s\n' + 'Contents of file %s:\n' + '\n' + '%s') % ( + ' '.join(call_args), + 'env=%s' % self.env, + fn, + '----\n%s\n----' % f.read(), + ) + ErrorClass = (FuturizeError if 'futurize' in script else PasteurizeError) + raise ErrorClass(msg, e.returncode, e.cmd, output=e.output) + return output + + def _run_test_script(self, filename='mytestscript.py', + interpreter=sys.executable): + # Absolute file path: + fn = self.tempdir + filename + try: + output = check_output([interpreter, fn], + env=self.env, stderr=STDOUT) + except 
CalledProcessError as e: + with open(fn) as f: + msg = ( + 'Error running the command %s\n' + '%s\n' + 'Contents of file %s:\n' + '\n' + '%s') % ( + ' '.join([interpreter, fn]), + 'env=%s' % self.env, + fn, + '----\n%s\n----' % f.read(), + ) + if not hasattr(e, 'output'): + # The attribute CalledProcessError.output doesn't exist on Py2.6 + e.output = None + raise VerboseCalledProcessError(msg, e.returncode, e.cmd, output=e.output) + return output + + +# Decorator to skip some tests on Python 2.6 ... +skip26 = unittest.skipIf(PY26, "this test is known to fail on Py2.6") + + +def expectedFailurePY3(func): + if not PY3: + return func + return unittest.expectedFailure(func) + +def expectedFailurePY26(func): + if not PY26: + return func + return unittest.expectedFailure(func) + + +def expectedFailurePY27(func): + if not PY27: + return func + return unittest.expectedFailure(func) + + +def expectedFailurePY2(func): + if not PY2: + return func + return unittest.expectedFailure(func) + + +# Renamed in Py3.3: +if not hasattr(unittest.TestCase, 'assertRaisesRegex'): + unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp + +# From Py3.3: +def assertRegex(self, text, expected_regex, msg=None): + """Fail the test unless the text matches the regular expression.""" + if isinstance(expected_regex, (str, unicode)): + assert expected_regex, "expected_regex must not be empty." 
+ expected_regex = re.compile(expected_regex) + if not expected_regex.search(text): + msg = msg or "Regex didn't match" + msg = '%s: %r not found in %r' % (msg, expected_regex.pattern, text) + raise self.failureException(msg) + +if not hasattr(unittest.TestCase, 'assertRegex'): + bind_method(unittest.TestCase, 'assertRegex', assertRegex) + +class _AssertRaisesBaseContext(object): + + def __init__(self, expected, test_case, callable_obj=None, + expected_regex=None): + self.expected = expected + self.test_case = test_case + if callable_obj is not None: + try: + self.obj_name = callable_obj.__name__ + except AttributeError: + self.obj_name = str(callable_obj) + else: + self.obj_name = None + if isinstance(expected_regex, (bytes, str)): + expected_regex = re.compile(expected_regex) + self.expected_regex = expected_regex + self.msg = None + + def _raiseFailure(self, standardMsg): + msg = self.test_case._formatMessage(self.msg, standardMsg) + raise self.test_case.failureException(msg) + + def handle(self, name, callable_obj, args, kwargs): + """ + If callable_obj is None, assertRaises/Warns is being used as a + context manager, so check for a 'msg' kwarg and return self. + If callable_obj is not None, call it passing args and kwargs. + """ + if callable_obj is None: + self.msg = kwargs.pop('msg', None) + return self + with self: + callable_obj(*args, **kwargs) + +class _AssertWarnsContext(_AssertRaisesBaseContext): + """A context manager used to implement TestCase.assertWarns* methods.""" + + def __enter__(self): + # The __warningregistry__'s need to be in a pristine state for tests + # to work properly. 
+ for v in sys.modules.values(): + if getattr(v, '__warningregistry__', None): + v.__warningregistry__ = {} + self.warnings_manager = warnings.catch_warnings(record=True) + self.warnings = self.warnings_manager.__enter__() + warnings.simplefilter("always", self.expected) + return self + + def __exit__(self, exc_type, exc_value, tb): + self.warnings_manager.__exit__(exc_type, exc_value, tb) + if exc_type is not None: + # let unexpected exceptions pass through + return + try: + exc_name = self.expected.__name__ + except AttributeError: + exc_name = str(self.expected) + first_matching = None + for m in self.warnings: + w = m.message + if not isinstance(w, self.expected): + continue + if first_matching is None: + first_matching = w + if (self.expected_regex is not None and + not self.expected_regex.search(str(w))): + continue + # store warning for later retrieval + self.warning = w + self.filename = m.filename + self.lineno = m.lineno + return + # Now we simply try to choose a helpful failure message + if first_matching is not None: + self._raiseFailure('"{}" does not match "{}"'.format( + self.expected_regex.pattern, str(first_matching))) + if self.obj_name: + self._raiseFailure("{} not triggered by {}".format(exc_name, + self.obj_name)) + else: + self._raiseFailure("{} not triggered".format(exc_name)) + + +def assertWarns(self, expected_warning, callable_obj=None, *args, **kwargs): + """Fail unless a warning of class warnClass is triggered + by callable_obj when invoked with arguments args and keyword + arguments kwargs. If a different type of warning is + triggered, it will not be handled: depending on the other + warning filtering rules in effect, it might be silenced, printed + out, or raised as an exception. + + If called with callable_obj omitted or None, will return a + context object used like this:: + + with self.assertWarns(SomeWarning): + do_something() + + An optional keyword argument 'msg' can be provided when assertWarns + is used as a context object. 
+ + The context manager keeps a reference to the first matching + warning as the 'warning' attribute; similarly, the 'filename' + and 'lineno' attributes give you information about the line + of Python code from which the warning was triggered. + This allows you to inspect the warning after the assertion:: + + with self.assertWarns(SomeWarning) as cm: + do_something() + the_warning = cm.warning + self.assertEqual(the_warning.some_attribute, 147) + """ + context = _AssertWarnsContext(expected_warning, self, callable_obj) + return context.handle('assertWarns', callable_obj, args, kwargs) + +if not hasattr(unittest.TestCase, 'assertWarns'): + bind_method(unittest.TestCase, 'assertWarns', assertWarns) diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/types/__init__.py new file mode 100644 index 000000000..062507703 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/types/__init__.py @@ -0,0 +1,257 @@ +""" +This module contains backports the data types that were significantly changed +in the transition from Python 2 to Python 3. + +- an implementation of Python 3's bytes object (pure Python subclass of + Python 2's builtin 8-bit str type) +- an implementation of Python 3's str object (pure Python subclass of + Python 2's builtin unicode type) +- a backport of the range iterator from Py3 with slicing support + +It is used as follows:: + + from __future__ import division, absolute_import, print_function + from builtins import bytes, dict, int, range, str + +to bring in the new semantics for these functions from Python 3. 
And +then, for example:: + + b = bytes(b'ABCD') + assert list(b) == [65, 66, 67, 68] + assert repr(b) == "b'ABCD'" + assert [65, 66] in b + + # These raise TypeErrors: + # b + u'EFGH' + # b.split(u'B') + # bytes(b',').join([u'Fred', u'Bill']) + + + s = str(u'ABCD') + + # These raise TypeErrors: + # s.join([b'Fred', b'Bill']) + # s.startswith(b'A') + # b'B' in s + # s.find(b'A') + # s.replace(u'A', b'a') + + # This raises an AttributeError: + # s.decode('utf-8') + + assert repr(s) == 'ABCD' # consistent repr with Py3 (no u prefix) + + + for i in range(10**11)[:10]: + pass + +and:: + + class VerboseList(list): + def append(self, item): + print('Adding an item') + super().append(item) # new simpler super() function + +For more information: +--------------------- + +- future.types.newbytes +- future.types.newdict +- future.types.newint +- future.types.newobject +- future.types.newrange +- future.types.newstr + + +Notes +===== + +range() +------- +``range`` is a custom class that backports the slicing behaviour from +Python 3 (based on the ``xrange`` module by Dan Crosta). See the +``newrange`` module docstring for more details. + + +super() +------- +``super()`` is based on Ryan Kelly's ``magicsuper`` module. See the +``newsuper`` module docstring for more details. + + +round() +------- +Python 3 modifies the behaviour of ``round()`` to use "Banker's Rounding". +See http://stackoverflow.com/a/10825998. See the ``newround`` module +docstring for more details. + +""" + +from __future__ import absolute_import, division, print_function + +import functools +from numbers import Integral + +from future import utils + + +# Some utility functions to enforce strict type-separation of unicode str and +# bytes: +def disallow_types(argnums, disallowed_types): + """ + A decorator that raises a TypeError if any of the given numbered + arguments is of the corresponding given type (e.g. bytes or unicode + string). 
+ + For example: + + @disallow_types([0, 1], [unicode, bytes]) + def f(a, b): + pass + + raises a TypeError when f is called if a unicode object is passed as + `a` or a bytes object is passed as `b`. + + This also skips over keyword arguments, so + + @disallow_types([0, 1], [unicode, bytes]) + def g(a, b=None): + pass + + doesn't raise an exception if g is called with only one argument a, + e.g.: + + g(b'Byte string') + + Example use: + + >>> class newbytes(object): + ... @disallow_types([1], [unicode]) + ... def __add__(self, other): + ... pass + + >>> newbytes('1234') + u'1234' #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + TypeError: can't concat 'bytes' to (unicode) str + """ + + def decorator(function): + + @functools.wraps(function) + def wrapper(*args, **kwargs): + # These imports are just for this decorator, and are defined here + # to prevent circular imports: + from .newbytes import newbytes + from .newint import newint + from .newstr import newstr + + errmsg = "argument can't be {0}" + for (argnum, mytype) in zip(argnums, disallowed_types): + # Handle the case where the type is passed as a string like 'newbytes'. + if isinstance(mytype, str) or isinstance(mytype, bytes): + mytype = locals()[mytype] + + # Only restrict kw args only if they are passed: + if len(args) <= argnum: + break + + # Here we use type() rather than isinstance() because + # __instancecheck__ is being overridden. E.g. + # isinstance(b'abc', newbytes) is True on Py2. + if type(args[argnum]) == mytype: + raise TypeError(errmsg.format(mytype)) + + return function(*args, **kwargs) + return wrapper + return decorator + + +def no(mytype, argnums=(1,)): + """ + A shortcut for the disallow_types decorator that disallows only one type + (in any position in argnums). + + Example use: + + >>> class newstr(object): + ... @no('bytes') + ... def __add__(self, other): + ... 
pass + + >>> newstr(u'1234') + b'1234' #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + TypeError: argument can't be bytes + + The object can also be passed directly, but passing the string helps + to prevent circular import problems. + """ + if isinstance(argnums, Integral): + argnums = (argnums,) + disallowed_types = [mytype] * len(argnums) + return disallow_types(argnums, disallowed_types) + + +def issubset(list1, list2): + """ + Examples: + + >>> issubset([], [65, 66, 67]) + True + >>> issubset([65], [65, 66, 67]) + True + >>> issubset([65, 66], [65, 66, 67]) + True + >>> issubset([65, 67], [65, 66, 67]) + False + """ + n = len(list1) + for startpos in range(len(list2) - n + 1): + if list2[startpos:startpos+n] == list1: + return True + return False + + +if utils.PY3: + import builtins + bytes = builtins.bytes + dict = builtins.dict + int = builtins.int + list = builtins.list + object = builtins.object + range = builtins.range + str = builtins.str + + # The identity mapping + newtypes = {bytes: bytes, + dict: dict, + int: int, + list: list, + object: object, + range: range, + str: str} + + __all__ = ['newtypes'] + +else: + + from .newbytes import newbytes + from .newdict import newdict + from .newint import newint + from .newlist import newlist + from .newrange import newrange + from .newobject import newobject + from .newstr import newstr + + newtypes = {bytes: newbytes, + dict: newdict, + int: newint, + long: newint, + list: newlist, + object: newobject, + range: newrange, + str: newbytes, + unicode: newstr} + + __all__ = ['newbytes', 'newdict', 'newint', 'newlist', 'newrange', 'newstr', 'newtypes'] diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newbytes.py b/.install/.kodi/addons/script.module.future/libs/future/types/newbytes.py new file mode 100644 index 000000000..2a337c864 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/types/newbytes.py @@ -0,0 +1,456 @@ +""" 
+Pure-Python implementation of a Python 3-like bytes object for Python 2. + +Why do this? Without it, the Python 2 bytes object is a very, very +different beast to the Python 3 bytes object. +""" + +from collections import Iterable +from numbers import Integral +import string +import copy + +from future.utils import istext, isbytes, PY3, with_metaclass +from future.types import no, issubset +from future.types.newobject import newobject + + +_builtin_bytes = bytes + +if PY3: + # We'll probably never use newstr on Py3 anyway... + unicode = str + + +class BaseNewBytes(type): + def __instancecheck__(cls, instance): + if cls == newbytes: + return isinstance(instance, _builtin_bytes) + else: + return issubclass(instance.__class__, cls) + + +def _newchr(x): + if isinstance(x, str): # this happens on pypy + return x.encode('ascii') + else: + return chr(x) + + +class newbytes(with_metaclass(BaseNewBytes, _builtin_bytes)): + """ + A backport of the Python 3 bytes object to Py2 + """ + def __new__(cls, *args, **kwargs): + """ + From the Py3 bytes docstring: + + bytes(iterable_of_ints) -> bytes + bytes(string, encoding[, errors]) -> bytes + bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer + bytes(int) -> bytes object of size given by the parameter initialized with null bytes + bytes() -> empty bytes object + + Construct an immutable array of bytes from: + - an iterable yielding integers in range(256) + - a text string encoded using the specified encoding + - any object implementing the buffer API. + - an integer + """ + + encoding = None + errors = None + + if len(args) == 0: + return super(newbytes, cls).__new__(cls) + elif len(args) >= 2: + args = list(args) + if len(args) == 3: + errors = args.pop() + encoding=args.pop() + # Was: elif isinstance(args[0], newbytes): + # We use type() instead of the above because we're redefining + # this to be True for all unicode string subclasses. Warning: + # This may render newstr un-subclassable. 
+ if type(args[0]) == newbytes: + # Special-case: for consistency with Py3.3, we return the same object + # (with the same id) if a newbytes object is passed into the + # newbytes constructor. + return args[0] + elif isinstance(args[0], _builtin_bytes): + value = args[0] + elif isinstance(args[0], unicode): + try: + if 'encoding' in kwargs: + assert encoding is None + encoding = kwargs['encoding'] + if 'errors' in kwargs: + assert errors is None + errors = kwargs['errors'] + except AssertionError: + raise TypeError('Argument given by name and position') + if encoding is None: + raise TypeError('unicode string argument without an encoding') + ### + # Was: value = args[0].encode(**kwargs) + # Python 2.6 string encode() method doesn't take kwargs: + # Use this instead: + newargs = [encoding] + if errors is not None: + newargs.append(errors) + value = args[0].encode(*newargs) + ### + elif hasattr(args[0], '__bytes__'): + value = args[0].__bytes__() + elif isinstance(args[0], Iterable): + if len(args[0]) == 0: + # This could be an empty list or tuple. Return b'' as on Py3. + value = b'' + else: + # Was: elif len(args[0])>0 and isinstance(args[0][0], Integral): + # # It's a list of integers + # But then we can't index into e.g. frozensets. Try to proceed + # anyway. + try: + value = bytearray([_newchr(x) for x in args[0]]) + except: + raise ValueError('bytes must be in range(0, 256)') + elif isinstance(args[0], Integral): + if args[0] < 0: + raise ValueError('negative count') + value = b'\x00' * args[0] + else: + value = args[0] + if type(value) == newbytes: + # Above we use type(...) rather than isinstance(...) because the + # newbytes metaclass overrides __instancecheck__. + # oldbytes(value) gives the wrong thing on Py2: the same + # result as str(value) on Py3, e.g. "b'abc'". (Issue #193). 
+ # So we handle this case separately: + return copy.copy(value) + else: + return super(newbytes, cls).__new__(cls, value) + + def __repr__(self): + return 'b' + super(newbytes, self).__repr__() + + def __str__(self): + return 'b' + "'{0}'".format(super(newbytes, self).__str__()) + + def __getitem__(self, y): + value = super(newbytes, self).__getitem__(y) + if isinstance(y, Integral): + return ord(value) + else: + return newbytes(value) + + def __getslice__(self, *args): + return self.__getitem__(slice(*args)) + + def __contains__(self, key): + if isinstance(key, int): + newbyteskey = newbytes([key]) + # Don't use isinstance() here because we only want to catch + # newbytes, not Python 2 str: + elif type(key) == newbytes: + newbyteskey = key + else: + newbyteskey = newbytes(key) + return issubset(list(newbyteskey), list(self)) + + @no(unicode) + def __add__(self, other): + return newbytes(super(newbytes, self).__add__(other)) + + @no(unicode) + def __radd__(self, left): + return newbytes(left) + self + + @no(unicode) + def __mul__(self, other): + return newbytes(super(newbytes, self).__mul__(other)) + + @no(unicode) + def __rmul__(self, other): + return newbytes(super(newbytes, self).__rmul__(other)) + + def __mod__(self, vals): + if isinstance(vals, newbytes): + vals = _builtin_bytes.__str__(vals) + + elif isinstance(vals, tuple): + newvals = [] + for v in vals: + if isinstance(v, newbytes): + v = _builtin_bytes.__str__(v) + newvals.append(v) + vals = tuple(newvals) + + elif (hasattr(vals.__class__, '__getitem__') and + hasattr(vals.__class__, 'iteritems')): + for k, v in vals.iteritems(): + if isinstance(v, newbytes): + vals[k] = _builtin_bytes.__str__(v) + + return _builtin_bytes.__mod__(self, vals) + + def __imod__(self, other): + return self.__mod__(other) + + def join(self, iterable_of_bytes): + errmsg = 'sequence item {0}: expected bytes, {1} found' + if isbytes(iterable_of_bytes) or istext(iterable_of_bytes): + raise TypeError(errmsg.format(0, 
type(iterable_of_bytes))) + for i, item in enumerate(iterable_of_bytes): + if istext(item): + raise TypeError(errmsg.format(i, type(item))) + return newbytes(super(newbytes, self).join(iterable_of_bytes)) + + @classmethod + def fromhex(cls, string): + # Only on Py2: + return cls(string.replace(' ', '').decode('hex')) + + @no(unicode) + def find(self, sub, *args): + return super(newbytes, self).find(sub, *args) + + @no(unicode) + def rfind(self, sub, *args): + return super(newbytes, self).rfind(sub, *args) + + @no(unicode, (1, 2)) + def replace(self, old, new, *args): + return newbytes(super(newbytes, self).replace(old, new, *args)) + + def encode(self, *args): + raise AttributeError("encode method has been disabled in newbytes") + + def decode(self, encoding='utf-8', errors='strict'): + """ + Returns a newstr (i.e. unicode subclass) + + Decode B using the codec registered for encoding. Default encoding + is 'utf-8'. errors may be given to set a different error + handling scheme. Default is 'strict' meaning that encoding errors raise + a UnicodeDecodeError. Other possible values are 'ignore' and 'replace' + as well as any other name registered with codecs.register_error that is + able to handle UnicodeDecodeErrors. + """ + # Py2 str.encode() takes encoding and errors as optional parameter, + # not keyword arguments as in Python 3 str. + + from future.types.newstr import newstr + + if errors == 'surrogateescape': + from future.utils.surrogateescape import register_surrogateescape + register_surrogateescape() + + return newstr(super(newbytes, self).decode(encoding, errors)) + + # This is currently broken: + # # We implement surrogateescape error handling here in addition rather + # # than relying on the custom error handler from + # # future.utils.surrogateescape to be registered globally, even though + # # that is fine in the case of decoding. (But not encoding: see the + # # comments in newstr.encode()``.) 
+ # + # if errors == 'surrogateescape': + # # Decode char by char + # mybytes = [] + # for code in self: + # # Code is an int + # if 0x80 <= code <= 0xFF: + # b = 0xDC00 + code + # elif code <= 0x7F: + # b = _unichr(c).decode(encoding=encoding) + # else: + # # # It may be a bad byte + # # FIXME: What to do in this case? See the Py3 docs / tests. + # # # Try swallowing it. + # # continue + # # print("RAISE!") + # raise NotASurrogateError + # mybytes.append(b) + # return newbytes(mybytes) + # return newbytes(super(newstr, self).decode(encoding, errors)) + + @no(unicode) + def startswith(self, prefix, *args): + return super(newbytes, self).startswith(prefix, *args) + + @no(unicode) + def endswith(self, prefix, *args): + return super(newbytes, self).endswith(prefix, *args) + + @no(unicode) + def split(self, sep=None, maxsplit=-1): + # Py2 str.split() takes maxsplit as an optional parameter, not as a + # keyword argument as in Python 3 bytes. + parts = super(newbytes, self).split(sep, maxsplit) + return [newbytes(part) for part in parts] + + def splitlines(self, keepends=False): + """ + B.splitlines([keepends]) -> list of lines + + Return a list of the lines in B, breaking at line boundaries. + Line breaks are not included in the resulting list unless keepends + is given and true. + """ + # Py2 str.splitlines() takes keepends as an optional parameter, + # not as a keyword argument as in Python 3 bytes. + parts = super(newbytes, self).splitlines(keepends) + return [newbytes(part) for part in parts] + + @no(unicode) + def rsplit(self, sep=None, maxsplit=-1): + # Py2 str.rsplit() takes maxsplit as an optional parameter, not as a + # keyword argument as in Python 3 bytes. 
+ parts = super(newbytes, self).rsplit(sep, maxsplit) + return [newbytes(part) for part in parts] + + @no(unicode) + def partition(self, sep): + parts = super(newbytes, self).partition(sep) + return tuple(newbytes(part) for part in parts) + + @no(unicode) + def rpartition(self, sep): + parts = super(newbytes, self).rpartition(sep) + return tuple(newbytes(part) for part in parts) + + @no(unicode, (1,)) + def rindex(self, sub, *args): + ''' + S.rindex(sub [,start [,end]]) -> int + + Like S.rfind() but raise ValueError when the substring is not found. + ''' + pos = self.rfind(sub, *args) + if pos == -1: + raise ValueError('substring not found') + + @no(unicode) + def index(self, sub, *args): + ''' + Returns index of sub in bytes. + Raises ValueError if byte is not in bytes and TypeError if can't + be converted bytes or its length is not 1. + ''' + if isinstance(sub, int): + if len(args) == 0: + start, end = 0, len(self) + elif len(args) == 1: + start = args[0] + elif len(args) == 2: + start, end = args + else: + raise TypeError('takes at most 3 arguments') + return list(self)[start:end].index(sub) + if not isinstance(sub, bytes): + try: + sub = self.__class__(sub) + except (TypeError, ValueError): + raise TypeError("can't convert sub to bytes") + try: + return super(newbytes, self).index(sub, *args) + except ValueError: + raise ValueError('substring not found') + + def __eq__(self, other): + if isinstance(other, (_builtin_bytes, bytearray)): + return super(newbytes, self).__eq__(other) + else: + return False + + def __ne__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__ne__(other) + else: + return True + + unorderable_err = 'unorderable types: bytes() and {0}' + + def __lt__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__lt__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __le__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, 
self).__le__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __gt__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__gt__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __ge__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__ge__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __native__(self): + # We can't just feed a newbytes object into str(), because + # newbytes.__str__() returns e.g. "b'blah'", consistent with Py3 bytes. + return super(newbytes, self).__str__() + + def __getattribute__(self, name): + """ + A trick to cause the ``hasattr`` builtin-fn to return False for + the 'encode' method on Py2. + """ + if name in ['encode', u'encode']: + raise AttributeError("encode method has been disabled in newbytes") + return super(newbytes, self).__getattribute__(name) + + @no(unicode) + def rstrip(self, bytes_to_strip=None): + """ + Strip trailing bytes contained in the argument. + If the argument is omitted, strip trailing ASCII whitespace. + """ + return newbytes(super(newbytes, self).rstrip(bytes_to_strip)) + + @no(unicode) + def strip(self, bytes_to_strip=None): + """ + Strip leading and trailing bytes contained in the argument. + If the argument is omitted, strip trailing ASCII whitespace. + """ + return newbytes(super(newbytes, self).strip(bytes_to_strip)) + + def lower(self): + """ + b.lower() -> copy of b + + Return a copy of b with all ASCII characters converted to lowercase. + """ + return newbytes(super(newbytes, self).lower()) + + @no(unicode) + def upper(self): + """ + b.upper() -> copy of b + + Return a copy of b with all ASCII characters converted to uppercase. 
+ """ + return newbytes(super(newbytes, self).upper()) + + @classmethod + @no(unicode) + def maketrans(cls, frm, to): + """ + B.maketrans(frm, to) -> translation table + + Return a translation table (a bytes object of length 256) suitable + for use in the bytes or bytearray translate method where each byte + in frm is mapped to the byte at the same position in to. + The bytes objects frm and to must be of the same length. + """ + return newbytes(string.maketrans(frm, to)) + + +__all__ = ['newbytes'] diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newdict.py b/.install/.kodi/addons/script.module.future/libs/future/types/newdict.py new file mode 100644 index 000000000..3f3a559dd --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/types/newdict.py @@ -0,0 +1,111 @@ +""" +A dict subclass for Python 2 that behaves like Python 3's dict + +Example use: + +>>> from builtins import dict +>>> d1 = dict() # instead of {} for an empty dict +>>> d2 = dict(key1='value1', key2='value2') + +The keys, values and items methods now return iterators on Python 2.x +(with set-like behaviour on Python 2.7). + +>>> for d in (d1, d2): +... assert not isinstance(d.keys(), list) +... assert not isinstance(d.values(), list) +... 
assert not isinstance(d.items(), list) +""" + +import sys + +from future.utils import with_metaclass +from future.types.newobject import newobject + + +_builtin_dict = dict +ver = sys.version_info[:2] + + +class BaseNewDict(type): + def __instancecheck__(cls, instance): + if cls == newdict: + return isinstance(instance, _builtin_dict) + else: + return issubclass(instance.__class__, cls) + + +class newdict(with_metaclass(BaseNewDict, _builtin_dict)): + """ + A backport of the Python 3 dict object to Py2 + """ + def items(self): + """ + On Python 2.7+: + D.items() -> a set-like object providing a view on D's items + On Python 2.6: + D.items() -> an iterator over D's items + """ + if ver == (2, 7): + return self.viewitems() + elif ver == (2, 6): + return self.iteritems() + elif ver >= (3, 0): + return self.items() + + def keys(self): + """ + On Python 2.7+: + D.keys() -> a set-like object providing a view on D's keys + On Python 2.6: + D.keys() -> an iterator over D's keys + """ + if ver == (2, 7): + return self.viewkeys() + elif ver == (2, 6): + return self.iterkeys() + elif ver >= (3, 0): + return self.keys() + + def values(self): + """ + On Python 2.7+: + D.values() -> a set-like object providing a view on D's values + On Python 2.6: + D.values() -> an iterator over D's values + """ + if ver == (2, 7): + return self.viewvalues() + elif ver == (2, 6): + return self.itervalues() + elif ver >= (3, 0): + return self.values() + + def __new__(cls, *args, **kwargs): + """ + dict() -> new empty dictionary + dict(mapping) -> new dictionary initialized from a mapping object's + (key, value) pairs + dict(iterable) -> new dictionary initialized as if via: + d = {} + for k, v in iterable: + d[k] = v + dict(**kwargs) -> new dictionary initialized with the name=value pairs + in the keyword argument list. 
For example: dict(one=1, two=2) + """ + + if len(args) == 0: + return super(newdict, cls).__new__(cls) + elif type(args[0]) == newdict: + value = args[0] + else: + value = args[0] + return super(newdict, cls).__new__(cls, value) + + def __native__(self): + """ + Hook for the future.utils.native() function + """ + return dict(self) + + +__all__ = ['newdict'] diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newint.py b/.install/.kodi/addons/script.module.future/libs/future/types/newint.py new file mode 100644 index 000000000..705b8fa95 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/types/newint.py @@ -0,0 +1,379 @@ +""" +Backport of Python 3's int, based on Py2's long. + +They are very similar. The most notable difference is: + +- representation: trailing L in Python 2 removed in Python 3 +""" +from __future__ import division + +import struct +import collections + +from future.types.newbytes import newbytes +from future.types.newobject import newobject +from future.utils import PY3, isint, istext, isbytes, with_metaclass, native + + +if PY3: + long = int + + +class BaseNewInt(type): + def __instancecheck__(cls, instance): + if cls == newint: + # Special case for Py2 short or long int + return isinstance(instance, (int, long)) + else: + return issubclass(instance.__class__, cls) + + +class newint(with_metaclass(BaseNewInt, long)): + """ + A backport of the Python 3 int object to Py2 + """ + def __new__(cls, x=0, base=10): + """ + From the Py3 int docstring: + + | int(x=0) -> integer + | int(x, base=10) -> integer + | + | Convert a number or string to an integer, or return 0 if no + | arguments are given. If x is a number, return x.__int__(). For + | floating point numbers, this truncates towards zero. + | + | If x is not a number or if base is given, then x must be a string, + | bytes, or bytearray instance representing an integer literal in the + | given base. 
The literal can be preceded by '+' or '-' and be + | surrounded by whitespace. The base defaults to 10. Valid bases are + | 0 and 2-36. Base 0 means to interpret the base from the string as an + | integer literal. + | >>> int('0b100', base=0) + | 4 + + """ + try: + val = x.__int__() + except AttributeError: + val = x + else: + if not isint(val): + raise TypeError('__int__ returned non-int ({0})'.format( + type(val))) + + if base != 10: + # Explicit base + if not (istext(val) or isbytes(val) or isinstance(val, bytearray)): + raise TypeError( + "int() can't convert non-string with explicit base") + try: + return super(newint, cls).__new__(cls, val, base) + except TypeError: + return super(newint, cls).__new__(cls, newbytes(val), base) + # After here, base is 10 + try: + return super(newint, cls).__new__(cls, val) + except TypeError: + # Py2 long doesn't handle bytearray input with an explicit base, so + # handle this here. + # Py3: int(bytearray(b'10'), 2) == 2 + # Py2: int(bytearray(b'10'), 2) == 2 raises TypeError + # Py2: long(bytearray(b'10'), 2) == 2 raises TypeError + try: + return super(newint, cls).__new__(cls, newbytes(val)) + except: + raise TypeError("newint argument must be a string or a number," + "not '{0}'".format(type(val))) + + def __repr__(self): + """ + Without the L suffix + """ + value = super(newint, self).__repr__() + assert value[-1] == 'L' + return value[:-1] + + def __add__(self, other): + value = super(newint, self).__add__(other) + if value is NotImplemented: + return long(self) + other + return newint(value) + + def __radd__(self, other): + value = super(newint, self).__radd__(other) + if value is NotImplemented: + return other + long(self) + return newint(value) + + def __sub__(self, other): + value = super(newint, self).__sub__(other) + if value is NotImplemented: + return long(self) - other + return newint(value) + + def __rsub__(self, other): + value = super(newint, self).__rsub__(other) + if value is NotImplemented: + return other - 
long(self) + return newint(value) + + def __mul__(self, other): + value = super(newint, self).__mul__(other) + if isint(value): + return newint(value) + elif value is NotImplemented: + return long(self) * other + return value + + def __rmul__(self, other): + value = super(newint, self).__rmul__(other) + if isint(value): + return newint(value) + elif value is NotImplemented: + return other * long(self) + return value + + def __div__(self, other): + # We override this rather than e.g. relying on object.__div__ or + # long.__div__ because we want to wrap the value in a newint() + # call if other is another int + value = long(self) / other + if isinstance(other, (int, long)): + return newint(value) + else: + return value + + def __rdiv__(self, other): + value = other / long(self) + if isinstance(other, (int, long)): + return newint(value) + else: + return value + + def __idiv__(self, other): + # long has no __idiv__ method. Use __itruediv__ and cast back to + # newint: + value = self.__itruediv__(other) + if isinstance(other, (int, long)): + return newint(value) + else: + return value + + def __truediv__(self, other): + value = super(newint, self).__truediv__(other) + if value is NotImplemented: + value = long(self) / other + return value + + def __rtruediv__(self, other): + return super(newint, self).__rtruediv__(other) + + def __itruediv__(self, other): + # long has no __itruediv__ method + mylong = long(self) + mylong /= other + return mylong + + def __floordiv__(self, other): + return newint(super(newint, self).__floordiv__(other)) + + def __rfloordiv__(self, other): + return newint(super(newint, self).__rfloordiv__(other)) + + def __ifloordiv__(self, other): + # long has no __ifloordiv__ method + mylong = long(self) + mylong //= other + return newint(mylong) + + def __mod__(self, other): + value = super(newint, self).__mod__(other) + if value is NotImplemented: + return long(self) % other + return newint(value) + + def __rmod__(self, other): + value = 
super(newint, self).__rmod__(other) + if value is NotImplemented: + return other % long(self) + return newint(value) + + def __divmod__(self, other): + value = super(newint, self).__divmod__(other) + if value is NotImplemented: + mylong = long(self) + return (mylong // other, mylong % other) + return (newint(value[0]), newint(value[1])) + + def __rdivmod__(self, other): + value = super(newint, self).__rdivmod__(other) + if value is NotImplemented: + mylong = long(self) + return (other // mylong, other % mylong) + return (newint(value[0]), newint(value[1])) + + def __pow__(self, other): + value = super(newint, self).__pow__(other) + if value is NotImplemented: + return long(self) ** other + return newint(value) + + def __rpow__(self, other): + value = super(newint, self).__rpow__(other) + if value is NotImplemented: + return other ** long(self) + return newint(value) + + def __lshift__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for <<: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__lshift__(other)) + + def __rshift__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for >>: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__rshift__(other)) + + def __and__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for &: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__and__(other)) + + def __or__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for |: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__or__(other)) + + def __xor__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for ^: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, 
self).__xor__(other)) + + def __neg__(self): + return newint(super(newint, self).__neg__()) + + def __pos__(self): + return newint(super(newint, self).__pos__()) + + def __abs__(self): + return newint(super(newint, self).__abs__()) + + def __invert__(self): + return newint(super(newint, self).__invert__()) + + def __int__(self): + return self + + def __nonzero__(self): + return self.__bool__() + + def __bool__(self): + """ + So subclasses can override this, Py3-style + """ + return super(newint, self).__nonzero__() + + def __native__(self): + return long(self) + + def to_bytes(self, length, byteorder='big', signed=False): + """ + Return an array of bytes representing an integer. + + The integer is represented using length bytes. An OverflowError is + raised if the integer is not representable with the given number of + bytes. + + The byteorder argument determines the byte order used to represent the + integer. If byteorder is 'big', the most significant byte is at the + beginning of the byte array. If byteorder is 'little', the most + significant byte is at the end of the byte array. To request the native + byte order of the host system, use `sys.byteorder' as the byte order value. + + The signed keyword-only argument determines whether two's complement is + used to represent the integer. If signed is False and a negative integer + is given, an OverflowError is raised. 
+ """ + if length < 0: + raise ValueError("length argument must be non-negative") + if length == 0 and self == 0: + return newbytes() + if signed and self < 0: + bits = length * 8 + num = (2**bits) + self + if num <= 0: + raise OverflowError("int too smal to convert") + else: + if self < 0: + raise OverflowError("can't convert negative int to unsigned") + num = self + if byteorder not in ('little', 'big'): + raise ValueError("byteorder must be either 'little' or 'big'") + h = b'%x' % num + s = newbytes((b'0'*(len(h) % 2) + h).zfill(length*2).decode('hex')) + if signed: + high_set = s[0] & 0x80 + if self > 0 and high_set: + raise OverflowError("int too big to convert") + if self < 0 and not high_set: + raise OverflowError("int too small to convert") + if len(s) > length: + raise OverflowError("int too big to convert") + return s if byteorder == 'big' else s[::-1] + + @classmethod + def from_bytes(cls, mybytes, byteorder='big', signed=False): + """ + Return the integer represented by the given array of bytes. + + The mybytes argument must either support the buffer protocol or be an + iterable object producing bytes. Bytes and bytearray are examples of + built-in objects that support the buffer protocol. + + The byteorder argument determines the byte order used to represent the + integer. If byteorder is 'big', the most significant byte is at the + beginning of the byte array. If byteorder is 'little', the most + significant byte is at the end of the byte array. To request the native + byte order of the host system, use `sys.byteorder' as the byte order value. + + The signed keyword-only argument indicates whether two's complement is + used to represent the integer. + """ + if byteorder not in ('little', 'big'): + raise ValueError("byteorder must be either 'little' or 'big'") + if isinstance(mybytes, unicode): + raise TypeError("cannot convert unicode objects to bytes") + # mybytes can also be passed as a sequence of integers on Py3. 
+ # Test for this: + elif isinstance(mybytes, collections.Iterable): + mybytes = newbytes(mybytes) + b = mybytes if byteorder == 'big' else mybytes[::-1] + if len(b) == 0: + b = b'\x00' + # The encode() method has been disabled by newbytes, but Py2's + # str has it: + num = int(native(b).encode('hex'), 16) + if signed and (b[0] & 0x80): + num = num - (2 ** (len(b)*8)) + return cls(num) + + +# def _twos_comp(val, bits): +# """compute the 2's compliment of int value val""" +# if( (val&(1<<(bits-1))) != 0 ): +# val = val - (1<>> from builtins import list +>>> l1 = list() # instead of {} for an empty list +>>> l1.append('hello') +>>> l2 = l1.copy() + +""" + +import sys +import copy + +from future.utils import with_metaclass +from future.types.newobject import newobject + + +_builtin_list = list +ver = sys.version_info[:2] + + +class BaseNewList(type): + def __instancecheck__(cls, instance): + if cls == newlist: + return isinstance(instance, _builtin_list) + else: + return issubclass(instance.__class__, cls) + + +class newlist(with_metaclass(BaseNewList, _builtin_list)): + """ + A backport of the Python 3 list object to Py2 + """ + def copy(self): + """ + L.copy() -> list -- a shallow copy of L + """ + return copy.copy(self) + + def clear(self): + """L.clear() -> None -- remove all items from L""" + for i in range(len(self)): + self.pop() + + def __new__(cls, *args, **kwargs): + """ + list() -> new empty list + list(iterable) -> new list initialized from iterable's items + """ + + if len(args) == 0: + return super(newlist, cls).__new__(cls) + elif type(args[0]) == newlist: + value = args[0] + else: + value = args[0] + return super(newlist, cls).__new__(cls, value) + + def __add__(self, value): + return newlist(super(newlist, self).__add__(value)) + + def __radd__(self, left): + " left + self " + try: + return newlist(left) + self + except: + return NotImplemented + + def __getitem__(self, y): + """ + x.__getitem__(y) <==> x[y] + + Warning: a bug in Python 2.x prevents 
indexing via a slice from + returning a newlist object. + """ + if isinstance(y, slice): + return newlist(super(newlist, self).__getitem__(y)) + else: + return super(newlist, self).__getitem__(y) + + def __native__(self): + """ + Hook for the future.utils.native() function + """ + return list(self) + + def __nonzero__(self): + return len(self) > 0 + + +__all__ = ['newlist'] diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newmemoryview.py b/.install/.kodi/addons/script.module.future/libs/future/types/newmemoryview.py new file mode 100644 index 000000000..72c6990a7 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/types/newmemoryview.py @@ -0,0 +1,27 @@ +""" +A pretty lame implementation of a memoryview object for Python 2.6. +""" + +from collections import Iterable +from numbers import Integral +import string + +from future.utils import istext, isbytes, PY3, with_metaclass +from future.types import no, issubset + + +# class BaseNewBytes(type): +# def __instancecheck__(cls, instance): +# return isinstance(instance, _builtin_bytes) + + +class newmemoryview(object): # with_metaclass(BaseNewBytes, _builtin_bytes)): + """ + A pretty lame backport of the Python 2.7 and Python 3.x + memoryviewview object to Py2.6. + """ + def __init__(self, obj): + return obj + + +__all__ = ['newmemoryview'] diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newobject.py b/.install/.kodi/addons/script.module.future/libs/future/types/newobject.py new file mode 100644 index 000000000..776d47664 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/types/newobject.py @@ -0,0 +1,116 @@ +""" +An object subclass for Python 2 that gives new-style classes written in the +style of Python 3 (with ``__next__`` and unicode-returning ``__str__`` methods) +the appropriate Python 2-style ``next`` and ``__unicode__`` methods for compatible. 

Example use::

    from builtins import object

    my_unicode_str = u'Unicode string: \u5b54\u5b50'

    class A(object):
        def __str__(self):
            return my_unicode_str

    a = A()
    print(str(a))

    # On Python 2, these relations hold:
    assert unicode(a) == my_unicode_str
    assert str(a) == my_unicode_str.encode('utf-8')


Another example::

    from builtins import object

    class Upper(object):
        def __init__(self, iterable):
            self._iter = iter(iterable)
        def __next__(self):                 # note the Py3 interface
            return next(self._iter).upper()
        def __iter__(self):
            return self

    assert list(Upper('hello')) == list('HELLO')

"""


class newobject(object):
    """
    A magical object class that provides Python 2 compatibility methods::
        next
        __unicode__
        __nonzero__

    Subclasses of this class can merely define the Python 3 methods (__next__,
    __str__, and __bool__).
    """
    def next(self):
        # Py2 iteration protocol: forward to the Py3-style __next__ if the
        # subclass defines one.
        if hasattr(self, '__next__'):
            return type(self).__next__(self)
        raise TypeError('newobject is not an iterator')

    def __unicode__(self):
        # All subclasses of the builtin object should have __str__ defined.
        # Note that old-style classes do not have __str__ defined.
        if hasattr(self, '__str__'):
            s = type(self).__str__(self)
        else:
            s = str(self)
        if isinstance(s, unicode):
            return s
        else:
            return s.decode('utf-8')

    def __nonzero__(self):
        # Py2 truthiness: forward to the Py3-style __bool__ (or __len__)
        # when the subclass defines one.
        if hasattr(self, '__bool__'):
            return type(self).__bool__(self)
        if hasattr(self, '__len__'):
            return type(self).__len__(self)
        # object has no __nonzero__ method
        return True

    # Are these ever needed?
+ # def __div__(self): + # return self.__truediv__() + + # def __idiv__(self, other): + # return self.__itruediv__(other) + + def __long__(self): + if not hasattr(self, '__int__'): + return NotImplemented + return self.__int__() # not type(self).__int__(self) + + # def __new__(cls, *args, **kwargs): + # """ + # dict() -> new empty dictionary + # dict(mapping) -> new dictionary initialized from a mapping object's + # (key, value) pairs + # dict(iterable) -> new dictionary initialized as if via: + # d = {} + # for k, v in iterable: + # d[k] = v + # dict(**kwargs) -> new dictionary initialized with the name=value pairs + # in the keyword argument list. For example: dict(one=1, two=2) + # """ + + # if len(args) == 0: + # return super(newdict, cls).__new__(cls) + # elif type(args[0]) == newdict: + # return args[0] + # else: + # value = args[0] + # return super(newdict, cls).__new__(cls, value) + + def __native__(self): + """ + Hook for the future.utils.native() function + """ + return object(self) + + +__all__ = ['newobject'] diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newopen.py b/.install/.kodi/addons/script.module.future/libs/future/types/newopen.py new file mode 100644 index 000000000..b75d45afb --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/types/newopen.py @@ -0,0 +1,32 @@ +""" +A substitute for the Python 3 open() function. + +Note that io.open() is more complete but maybe slower. Even so, the +completeness may be a better default. TODO: compare these +""" + +_builtin_open = open + +class newopen(object): + """Wrapper providing key part of Python 3 open() interface. + + From IPython's py3compat.py module. License: BSD. 
+ """ + def __init__(self, fname, mode="r", encoding="utf-8"): + self.f = _builtin_open(fname, mode) + self.enc = encoding + + def write(self, s): + return self.f.write(s.encode(self.enc)) + + def read(self, size=-1): + return self.f.read(size).decode(self.enc) + + def close(self): + return self.f.close() + + def __enter__(self): + return self + + def __exit__(self, etype, value, traceback): + self.f.close() diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newrange.py b/.install/.kodi/addons/script.module.future/libs/future/types/newrange.py new file mode 100644 index 000000000..9173b0509 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/types/newrange.py @@ -0,0 +1,165 @@ +""" +Nearly identical to xrange.py, by Dan Crosta, from + + https://github.com/dcrosta/xrange.git + +This is included here in the ``future`` package rather than pointed to as +a dependency because there is no package for ``xrange`` on PyPI. It is +also tweaked to appear like a regular Python 3 ``range`` object rather +than a Python 2 xrange. + +From Dan Crosta's README: + + "A pure-Python implementation of Python 2.7's xrange built-in, with + some features backported from the Python 3.x range built-in (which + replaced xrange) in that version." + + Read more at + https://late.am/post/2012/06/18/what-the-heck-is-an-xrange +""" +from __future__ import absolute_import + +from collections import Sequence, Iterator +from itertools import islice + +from future.backports.misc import count # with step parameter on Py2.6 +# For backward compatibility with python-future versions < 0.14.4: +_count = count + + +class newrange(Sequence): + """ + Pure-Python backport of Python 3's range object. 
See `the CPython + documentation for details: + `_ + """ + + def __init__(self, *args): + if len(args) == 1: + start, stop, step = 0, args[0], 1 + elif len(args) == 2: + start, stop, step = args[0], args[1], 1 + elif len(args) == 3: + start, stop, step = args + else: + raise TypeError('range() requires 1-3 int arguments') + + try: + start, stop, step = int(start), int(stop), int(step) + except ValueError: + raise TypeError('an integer is required') + + if step == 0: + raise ValueError('range() arg 3 must not be zero') + elif step < 0: + stop = min(stop, start) + else: + stop = max(stop, start) + + self._start = start + self._stop = stop + self._step = step + self._len = (stop - start) // step + bool((stop - start) % step) + + @property + def start(self): + return self._start + + @property + def stop(self): + return self._stop + + @property + def step(self): + return self._step + + def __repr__(self): + if self._step == 1: + return 'range(%d, %d)' % (self._start, self._stop) + return 'range(%d, %d, %d)' % (self._start, self._stop, self._step) + + def __eq__(self, other): + return (isinstance(other, newrange) and + (self._len == 0 == other._len or + (self._start, self._step, self._len) == + (other._start, other._step, self._len))) + + def __len__(self): + return self._len + + def index(self, value): + """Return the 0-based position of integer `value` in + the sequence this range represents.""" + try: + diff = value - self._start + except TypeError: + raise ValueError('%r is not in range' % value) + quotient, remainder = divmod(diff, self._step) + if remainder == 0 and 0 <= quotient < self._len: + return abs(quotient) + raise ValueError('%r is not in range' % value) + + def count(self, value): + """Return the number of ocurrences of integer `value` + in the sequence this range represents.""" + # a value can occur exactly zero or one times + return int(value in self) + + def __contains__(self, value): + """Return ``True`` if the integer `value` occurs in + the sequence 
this range represents.""" + try: + self.index(value) + return True + except ValueError: + return False + + def __reversed__(self): + return iter(self[::-1]) + + def __getitem__(self, index): + """Return the element at position ``index`` in the sequence + this range represents, or raise :class:`IndexError` if the + position is out of range.""" + if isinstance(index, slice): + return self.__getitem_slice(index) + if index < 0: + # negative indexes access from the end + index = self._len + index + if index < 0 or index >= self._len: + raise IndexError('range object index out of range') + return self._start + index * self._step + + def __getitem_slice(self, slce): + """Return a range which represents the requested slce + of the sequence represented by this range. + """ + scaled_indices = (self._step * n for n in slce.indices(self._len)) + start_offset, stop_offset, new_step = scaled_indices + return newrange(self._start + start_offset, + self._start + stop_offset, + new_step) + + def __iter__(self): + """Return an iterator which enumerates the elements of the + sequence this range represents.""" + return range_iterator(self) + + +class range_iterator(Iterator): + """An iterator for a :class:`range`. + """ + def __init__(self, range_): + self._stepper = islice(count(range_.start, range_.step), len(range_)) + + def __iter__(self): + return self + + def __next__(self): + return next(self._stepper) + + def next(self): + return next(self._stepper) + + +__all__ = ['newrange'] diff --git a/.install/.kodi/addons/script.module.future/libs/future/types/newstr.py b/.install/.kodi/addons/script.module.future/libs/future/types/newstr.py new file mode 100644 index 000000000..e6272fb90 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/types/newstr.py @@ -0,0 +1,416 @@ +""" +This module redefines ``str`` on Python 2.x to be a subclass of the Py2 +``unicode`` type that behaves like the Python 3.x ``str``. 
+ +The main differences between ``newstr`` and Python 2.x's ``unicode`` type are +the stricter type-checking and absence of a `u''` prefix in the representation. + +It is designed to be used together with the ``unicode_literals`` import +as follows: + + >>> from __future__ import unicode_literals + >>> from builtins import str, isinstance + +On Python 3.x and normally on Python 2.x, these expressions hold + + >>> str('blah') is 'blah' + True + >>> isinstance('blah', str) + True + +However, on Python 2.x, with this import: + + >>> from __future__ import unicode_literals + +the same expressions are False: + + >>> str('blah') is 'blah' + False + >>> isinstance('blah', str) + False + +This module is designed to be imported together with ``unicode_literals`` on +Python 2 to bring the meaning of ``str`` back into alignment with unprefixed +string literals (i.e. ``unicode`` subclasses). + +Note that ``str()`` (and ``print()``) would then normally call the +``__unicode__`` method on objects in Python 2. To define string +representations of your objects portably across Py3 and Py2, use the +:func:`python_2_unicode_compatible` decorator in :mod:`future.utils`. + +""" + +from collections import Iterable +from numbers import Number + +from future.utils import PY3, istext, with_metaclass, isnewbytes +from future.types import no, issubset +from future.types.newobject import newobject + + +if PY3: + # We'll probably never use newstr on Py3 anyway... 
+ unicode = str + + +class BaseNewStr(type): + def __instancecheck__(cls, instance): + if cls == newstr: + return isinstance(instance, unicode) + else: + return issubclass(instance.__class__, cls) + + +class newstr(with_metaclass(BaseNewStr, unicode)): + """ + A backport of the Python 3 str object to Py2 + """ + no_convert_msg = "Can't convert '{0}' object to str implicitly" + + def __new__(cls, *args, **kwargs): + """ + From the Py3 str docstring: + + str(object='') -> str + str(bytes_or_buffer[, encoding[, errors]]) -> str + + Create a new string object from the given object. If encoding or + errors is specified, then the object must expose a data buffer + that will be decoded using the given encoding and error handler. + Otherwise, returns the result of object.__str__() (if defined) + or repr(object). + encoding defaults to sys.getdefaultencoding(). + errors defaults to 'strict'. + + """ + if len(args) == 0: + return super(newstr, cls).__new__(cls) + # Special case: If someone requests str(str(u'abc')), return the same + # object (same id) for consistency with Py3.3. This is not true for + # other objects like list or dict. + elif type(args[0]) == newstr and cls == newstr: + return args[0] + elif isinstance(args[0], unicode): + value = args[0] + elif isinstance(args[0], bytes): # i.e. Py2 bytes or newbytes + if 'encoding' in kwargs or len(args) > 1: + value = args[0].decode(*args[1:], **kwargs) + else: + value = args[0].__str__() + else: + value = args[0] + return super(newstr, cls).__new__(cls, value) + + def __repr__(self): + """ + Without the u prefix + """ + value = super(newstr, self).__repr__() + # assert value[0] == u'u' + return value[1:] + + def __getitem__(self, y): + """ + Warning: Python <= 2.7.6 has a bug that causes this method never to be called + when y is a slice object. Therefore the type of newstr()[:2] is wrong + (unicode instead of newstr). 
+ """ + return newstr(super(newstr, self).__getitem__(y)) + + def __contains__(self, key): + errmsg = "'in ' requires string as left operand, not {0}" + # Don't use isinstance() here because we only want to catch + # newstr, not Python 2 unicode: + if type(key) == newstr: + newkey = key + elif isinstance(key, unicode) or isinstance(key, bytes) and not isnewbytes(key): + newkey = newstr(key) + else: + raise TypeError(errmsg.format(type(key))) + return issubset(list(newkey), list(self)) + + @no('newbytes') + def __add__(self, other): + return newstr(super(newstr, self).__add__(other)) + + @no('newbytes') + def __radd__(self, left): + " left + self " + try: + return newstr(left) + self + except: + return NotImplemented + + def __mul__(self, other): + return newstr(super(newstr, self).__mul__(other)) + + def __rmul__(self, other): + return newstr(super(newstr, self).__rmul__(other)) + + def join(self, iterable): + errmsg = 'sequence item {0}: expected unicode string, found bytes' + for i, item in enumerate(iterable): + # Here we use type() rather than isinstance() because + # __instancecheck__ is being overridden. E.g. + # isinstance(b'abc', newbytes) is True on Py2. 
+ if isnewbytes(item): + raise TypeError(errmsg.format(i)) + # Support use as a staticmethod: str.join('-', ['a', 'b']) + if type(self) == newstr: + return newstr(super(newstr, self).join(iterable)) + else: + return newstr(super(newstr, newstr(self)).join(iterable)) + + @no('newbytes') + def find(self, sub, *args): + return super(newstr, self).find(sub, *args) + + @no('newbytes') + def rfind(self, sub, *args): + return super(newstr, self).rfind(sub, *args) + + @no('newbytes', (1, 2)) + def replace(self, old, new, *args): + return newstr(super(newstr, self).replace(old, new, *args)) + + def decode(self, *args): + raise AttributeError("decode method has been disabled in newstr") + + def encode(self, encoding='utf-8', errors='strict'): + """ + Returns bytes + + Encode S using the codec registered for encoding. Default encoding + is 'utf-8'. errors may be given to set a different error + handling scheme. Default is 'strict' meaning that encoding errors raise + a UnicodeEncodeError. Other possible values are 'ignore', 'replace' and + 'xmlcharrefreplace' as well as any other name registered with + codecs.register_error that can handle UnicodeEncodeErrors. + """ + from future.types.newbytes import newbytes + # Py2 unicode.encode() takes encoding and errors as optional parameter, + # not keyword arguments as in Python 3 str. + + # For the surrogateescape error handling mechanism, the + # codecs.register_error() function seems to be inadequate for an + # implementation of it when encoding. (Decoding seems fine, however.) + # For example, in the case of + # u'\udcc3'.encode('ascii', 'surrogateescape_handler') + # after registering the ``surrogateescape_handler`` function in + # future.utils.surrogateescape, both Python 2.x and 3.x raise an + # exception anyway after the function is called because the unicode + # string it has to return isn't encodable strictly as ASCII. + + if errors == 'surrogateescape': + if encoding == 'utf-16': + # Known to fail here. 
See test_encoding_works_normally() + raise NotImplementedError('FIXME: surrogateescape handling is ' + 'not yet implemented properly') + # Encode char by char, building up list of byte-strings + mybytes = [] + for c in self: + code = ord(c) + if 0xD800 <= code <= 0xDCFF: + mybytes.append(newbytes([code - 0xDC00])) + else: + mybytes.append(c.encode(encoding=encoding)) + return newbytes(b'').join(mybytes) + return newbytes(super(newstr, self).encode(encoding, errors)) + + @no('newbytes', 1) + def startswith(self, prefix, *args): + if isinstance(prefix, Iterable): + for thing in prefix: + if isnewbytes(thing): + raise TypeError(self.no_convert_msg.format(type(thing))) + return super(newstr, self).startswith(prefix, *args) + + @no('newbytes', 1) + def endswith(self, prefix, *args): + # Note we need the decorator above as well as the isnewbytes() + # check because prefix can be either a bytes object or e.g. a + # tuple of possible prefixes. (If it's a bytes object, each item + # in it is an int.) + if isinstance(prefix, Iterable): + for thing in prefix: + if isnewbytes(thing): + raise TypeError(self.no_convert_msg.format(type(thing))) + return super(newstr, self).endswith(prefix, *args) + + @no('newbytes', 1) + def split(self, sep=None, maxsplit=-1): + # Py2 unicode.split() takes maxsplit as an optional parameter, + # not as a keyword argument as in Python 3 str. + parts = super(newstr, self).split(sep, maxsplit) + return [newstr(part) for part in parts] + + @no('newbytes', 1) + def rsplit(self, sep=None, maxsplit=-1): + # Py2 unicode.rsplit() takes maxsplit as an optional parameter, + # not as a keyword argument as in Python 3 str. 
+ parts = super(newstr, self).rsplit(sep, maxsplit) + return [newstr(part) for part in parts] + + @no('newbytes', 1) + def partition(self, sep): + parts = super(newstr, self).partition(sep) + return tuple(newstr(part) for part in parts) + + @no('newbytes', 1) + def rpartition(self, sep): + parts = super(newstr, self).rpartition(sep) + return tuple(newstr(part) for part in parts) + + @no('newbytes', 1) + def index(self, sub, *args): + """ + Like newstr.find() but raise ValueError when the substring is not + found. + """ + pos = self.find(sub, *args) + if pos == -1: + raise ValueError('substring not found') + return pos + + def splitlines(self, keepends=False): + """ + S.splitlines(keepends=False) -> list of strings + + Return a list of the lines in S, breaking at line boundaries. + Line breaks are not included in the resulting list unless keepends + is given and true. + """ + # Py2 unicode.splitlines() takes keepends as an optional parameter, + # not as a keyword argument as in Python 3 str. 
+ parts = super(newstr, self).splitlines(keepends) + return [newstr(part) for part in parts] + + def __eq__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__eq__(other) + else: + return False + + def __ne__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__ne__(other) + else: + return True + + unorderable_err = 'unorderable types: str() and {0}' + + def __lt__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__lt__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __le__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__le__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __gt__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__gt__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __ge__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__ge__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __getattribute__(self, name): + """ + A trick to cause the ``hasattr`` builtin-fn to return False for + the 'decode' method on Py2. + """ + if name in ['decode', u'decode']: + raise AttributeError("decode method has been disabled in newstr") + return super(newstr, self).__getattribute__(name) + + def __native__(self): + """ + A hook for the future.utils.native() function. + """ + return unicode(self) + + @staticmethod + def maketrans(x, y=None, z=None): + """ + Return a translation table usable for str.translate(). 
+ + If there is only one argument, it must be a dictionary mapping Unicode + ordinals (integers) or characters to Unicode ordinals, strings or None. + Character keys will be then converted to ordinals. + If there are two arguments, they must be strings of equal length, and + in the resulting dictionary, each character in x will be mapped to the + character at the same position in y. If there is a third argument, it + must be a string, whose characters will be mapped to None in the result. + """ + + if y is None: + assert z is None + if not isinstance(x, dict): + raise TypeError('if you give only one argument to maketrans it must be a dict') + result = {} + for (key, value) in x.items(): + if len(key) > 1: + raise ValueError('keys in translate table must be strings or integers') + result[ord(key)] = value + else: + if not isinstance(x, unicode) and isinstance(y, unicode): + raise TypeError('x and y must be unicode strings') + if not len(x) == len(y): + raise ValueError('the first two maketrans arguments must have equal length') + result = {} + for (xi, yi) in zip(x, y): + if len(xi) > 1: + raise ValueError('keys in translate table must be strings or integers') + result[ord(xi)] = ord(yi) + + if z is not None: + for char in z: + result[ord(char)] = None + return result + + def translate(self, table): + """ + S.translate(table) -> str + + Return a copy of the string S, where all characters have been mapped + through the given translation table, which must be a mapping of + Unicode ordinals to Unicode ordinals, strings, or None. + Unmapped characters are left untouched. Characters mapped to None + are deleted. 
+ """ + l = [] + for c in self: + if ord(c) in table: + val = table[ord(c)] + if val is None: + continue + elif isinstance(val, unicode): + l.append(val) + else: + l.append(chr(val)) + else: + l.append(c) + return ''.join(l) + + def isprintable(self): + raise NotImplementedError('fixme') + + def isidentifier(self): + raise NotImplementedError('fixme') + + def format_map(self): + raise NotImplementedError('fixme') + + +__all__ = ['newstr'] diff --git a/.install/.kodi/addons/script.module.future/libs/future/utils/__init__.py b/.install/.kodi/addons/script.module.future/libs/future/utils/__init__.py new file mode 100644 index 000000000..906f1e464 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/utils/__init__.py @@ -0,0 +1,741 @@ +""" +A selection of cross-compatible functions for Python 2 and 3. + +This module exports useful functions for 2/3 compatible code: + + * bind_method: binds functions to classes + * ``native_str_to_bytes`` and ``bytes_to_native_str`` + * ``native_str``: always equal to the native platform string object (because + this may be shadowed by imports from future.builtins) + * lists: lrange(), lmap(), lzip(), lfilter() + * iterable method compatibility: + - iteritems, iterkeys, itervalues + - viewitems, viewkeys, viewvalues + + These use the original method if available, otherwise they use items, + keys, values. + + * types: + + * text_type: unicode in Python 2, str in Python 3 + * binary_type: str in Python 2, bytes in Python 3 + * string_types: basestring in Python 2, str in Python 3 + + * bchr(c): + Take an integer and make a 1-character byte string + * bord(c) + Take the result of indexing on a byte string and make an integer + * tobytes(s) + Take a text string, a byte string, or a sequence of characters taken + from a byte string, and make a byte string. 
+ + * raise_from() + * raise_with_traceback() + +This module also defines these decorators: + + * ``python_2_unicode_compatible`` + * ``with_metaclass`` + * ``implements_iterator`` + +Some of the functions in this module come from the following sources: + + * Jinja2 (BSD licensed: see + https://github.com/mitsuhiko/jinja2/blob/master/LICENSE) + * Pandas compatibility module pandas.compat + * six.py by Benjamin Peterson + * Django +""" + +import types +import sys +import numbers +import functools +import copy +import inspect + + +PY3 = sys.version_info[0] == 3 +PY35_PLUS = sys.version_info[0:2] >= (3, 5) +PY36_PLUS = sys.version_info[0:2] >= (3, 6) +PY2 = sys.version_info[0] == 2 +PY26 = sys.version_info[0:2] == (2, 6) +PY27 = sys.version_info[0:2] == (2, 7) +PYPY = hasattr(sys, 'pypy_translation_info') + + +def python_2_unicode_compatible(cls): + """ + A decorator that defines __unicode__ and __str__ methods under Python + 2. Under Python 3, this decorator is a no-op. + + To support Python 2 and 3 with a single code base, define a __str__ + method returning unicode text and apply this decorator to the class, like + this:: + + >>> from future.utils import python_2_unicode_compatible + + >>> @python_2_unicode_compatible + ... class MyClass(object): + ... def __str__(self): + ... return u'Unicode string: \u5b54\u5b50' + + >>> a = MyClass() + + Then, after this import: + + >>> from future.builtins import str + + the following is ``True`` on both Python 3 and 2:: + + >>> str(a) == a.encode('utf-8').decode('utf-8') + True + + and, on a Unicode-enabled terminal with the right fonts, these both print the + Chinese characters for Confucius:: + + >>> print(a) + >>> print(str(a)) + + The implementation comes from django.utils.encoding. + """ + if not PY3: + cls.__unicode__ = cls.__str__ + cls.__str__ = lambda self: self.__unicode__().encode('utf-8') + return cls + + +def with_metaclass(meta, *bases): + """ + Function from jinja2/_compat.py. License: BSD. 
+ + Use it like this:: + + class BaseForm(object): + pass + + class FormType(type): + pass + + class Form(with_metaclass(FormType, BaseForm)): + pass + + This requires a bit of explanation: the basic idea is to make a + dummy metaclass for one level of class instantiation that replaces + itself with the actual metaclass. Because of internal type checks + we also need to make sure that we downgrade the custom metaclass + for one level to something closer to type (that's why __call__ and + __init__ comes back from type etc.). + + This has the advantage over six.with_metaclass of not introducing + dummy classes into the final MRO. + """ + class metaclass(meta): + __call__ = type.__call__ + __init__ = type.__init__ + def __new__(cls, name, this_bases, d): + if this_bases is None: + return type.__new__(cls, name, (), d) + return meta(name, bases, d) + return metaclass('temporary_class', None, {}) + + +# Definitions from pandas.compat and six.py follow: +if PY3: + def bchr(s): + return bytes([s]) + def bstr(s): + if isinstance(s, str): + return bytes(s, 'latin-1') + else: + return bytes(s) + def bord(s): + return s + + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + +else: + # Python 2 + def bchr(s): + return chr(s) + def bstr(s): + return str(s) + def bord(s): + return ord(s) + + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + +### + +if PY3: + def tobytes(s): + if isinstance(s, bytes): + return s + else: + if isinstance(s, str): + return s.encode('latin-1') + else: + return bytes(s) +else: + # Python 2 + def tobytes(s): + if isinstance(s, unicode): + return s.encode('latin-1') + else: + return ''.join(s) + +tobytes.__doc__ = """ + Encodes to latin-1 (where the first 256 chars are the same as + ASCII.) 
+ """ + +if PY3: + def native_str_to_bytes(s, encoding='utf-8'): + return s.encode(encoding) + + def bytes_to_native_str(b, encoding='utf-8'): + return b.decode(encoding) + + def text_to_native_str(t, encoding=None): + return t +else: + # Python 2 + def native_str_to_bytes(s, encoding=None): + from future.types import newbytes # to avoid a circular import + return newbytes(s) + + def bytes_to_native_str(b, encoding=None): + return native(b) + + def text_to_native_str(t, encoding='ascii'): + """ + Use this to create a Py2 native string when "from __future__ import + unicode_literals" is in effect. + """ + return unicode(t).encode(encoding) + +native_str_to_bytes.__doc__ = """ + On Py3, returns an encoded string. + On Py2, returns a newbytes type, ignoring the ``encoding`` argument. + """ + +if PY3: + # list-producing versions of the major Python iterating functions + def lrange(*args, **kwargs): + return list(range(*args, **kwargs)) + + def lzip(*args, **kwargs): + return list(zip(*args, **kwargs)) + + def lmap(*args, **kwargs): + return list(map(*args, **kwargs)) + + def lfilter(*args, **kwargs): + return list(filter(*args, **kwargs)) +else: + import __builtin__ + # Python 2-builtin ranges produce lists + lrange = __builtin__.range + lzip = __builtin__.zip + lmap = __builtin__.map + lfilter = __builtin__.filter + + +def isidentifier(s, dotted=False): + ''' + A function equivalent to the str.isidentifier method on Py3 + ''' + if dotted: + return all(isidentifier(a) for a in s.split('.')) + if PY3: + return s.isidentifier() + else: + import re + _name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$") + return bool(_name_re.match(s)) + + +def viewitems(obj, **kwargs): + """ + Function for iterating over dictionary items with the same set-like + behaviour on Py2.7 as on Py3. 
+ + Passes kwargs to method.""" + func = getattr(obj, "viewitems", None) + if not func: + func = obj.items + return func(**kwargs) + + +def viewkeys(obj, **kwargs): + """ + Function for iterating over dictionary keys with the same set-like + behaviour on Py2.7 as on Py3. + + Passes kwargs to method.""" + func = getattr(obj, "viewkeys", None) + if not func: + func = obj.keys + return func(**kwargs) + + +def viewvalues(obj, **kwargs): + """ + Function for iterating over dictionary values with the same set-like + behaviour on Py2.7 as on Py3. + + Passes kwargs to method.""" + func = getattr(obj, "viewvalues", None) + if not func: + func = obj.values + return func(**kwargs) + + +def iteritems(obj, **kwargs): + """Use this only if compatibility with Python versions before 2.7 is + required. Otherwise, prefer viewitems(). + """ + func = getattr(obj, "iteritems", None) + if not func: + func = obj.items + return func(**kwargs) + + +def iterkeys(obj, **kwargs): + """Use this only if compatibility with Python versions before 2.7 is + required. Otherwise, prefer viewkeys(). + """ + func = getattr(obj, "iterkeys", None) + if not func: + func = obj.keys + return func(**kwargs) + + +def itervalues(obj, **kwargs): + """Use this only if compatibility with Python versions before 2.7 is + required. Otherwise, prefer viewvalues(). + """ + func = getattr(obj, "itervalues", None) + if not func: + func = obj.values + return func(**kwargs) + + +def bind_method(cls, name, func): + """Bind a method to class, python 2 and python 3 compatible. 
+ + Parameters + ---------- + + cls : type + class to receive bound method + name : basestring + name of method on class instance + func : function + function to be bound as method + + Returns + ------- + None + """ + # only python 2 has an issue with bound/unbound methods + if not PY3: + setattr(cls, name, types.MethodType(func, None, cls)) + else: + setattr(cls, name, func) + + +def getexception(): + return sys.exc_info()[1] + + +def _get_caller_globals_and_locals(): + """ + Returns the globals and locals of the calling frame. + + Is there an alternative to frame hacking here? + """ + caller_frame = inspect.stack()[2] + myglobals = caller_frame[0].f_globals + mylocals = caller_frame[0].f_locals + return myglobals, mylocals + + +def _repr_strip(mystring): + """ + Returns the string without any initial or final quotes. + """ + r = repr(mystring) + if r.startswith("'") and r.endswith("'"): + return r[1:-1] + else: + return r + + +if PY3: + def raise_from(exc, cause): + """ + Equivalent to: + + raise EXCEPTION from CAUSE + + on Python 3. (See PEP 3134). + """ + myglobals, mylocals = _get_caller_globals_and_locals() + + # We pass the exception and cause along with other globals + # when we exec(): + myglobals = myglobals.copy() + myglobals['__python_future_raise_from_exc'] = exc + myglobals['__python_future_raise_from_cause'] = cause + execstr = "raise __python_future_raise_from_exc from __python_future_raise_from_cause" + exec(execstr, myglobals, mylocals) + + def raise_(tp, value=None, tb=None): + """ + A function that matches the Python 2.x ``raise`` statement. This + allows re-raising exceptions with the cls value and traceback on + Python 2 and 3. 
+ """ + if value is not None and isinstance(tp, Exception): + raise TypeError("instance exception may not have a separate value") + if value is not None: + exc = tp(value) + else: + exc = tp + if exc.__traceback__ is not tb: + raise exc.with_traceback(tb) + raise exc + + def raise_with_traceback(exc, traceback=Ellipsis): + if traceback == Ellipsis: + _, _, traceback = sys.exc_info() + raise exc.with_traceback(traceback) + +else: + def raise_from(exc, cause): + """ + Equivalent to: + + raise EXCEPTION from CAUSE + + on Python 3. (See PEP 3134). + """ + # Is either arg an exception class (e.g. IndexError) rather than + # instance (e.g. IndexError('my message here')? If so, pass the + # name of the class undisturbed through to "raise ... from ...". + if isinstance(exc, type) and issubclass(exc, Exception): + e = exc() + # exc = exc.__name__ + # execstr = "e = " + _repr_strip(exc) + "()" + # myglobals, mylocals = _get_caller_globals_and_locals() + # exec(execstr, myglobals, mylocals) + else: + e = exc + e.__suppress_context__ = False + if isinstance(cause, type) and issubclass(cause, Exception): + e.__cause__ = cause() + e.__suppress_context__ = True + elif cause is None: + e.__cause__ = None + e.__suppress_context__ = True + elif isinstance(cause, BaseException): + e.__cause__ = cause + e.__suppress_context__ = True + else: + raise TypeError("exception causes must derive from BaseException") + e.__context__ = sys.exc_info()[1] + raise e + + exec(''' +def raise_(tp, value=None, tb=None): + raise tp, value, tb + +def raise_with_traceback(exc, traceback=Ellipsis): + if traceback == Ellipsis: + _, _, traceback = sys.exc_info() + raise exc, None, traceback +'''.strip()) + + +raise_with_traceback.__doc__ = ( +"""Raise exception with existing traceback. 
+If traceback is not passed, uses sys.exc_info() to get traceback.""" +) + + +# Deprecated alias for backward compatibility with ``future`` versions < 0.11: +reraise = raise_ + + +def implements_iterator(cls): + ''' + From jinja2/_compat.py. License: BSD. + + Use as a decorator like this:: + + @implements_iterator + class UppercasingIterator(object): + def __init__(self, iterable): + self._iter = iter(iterable) + def __iter__(self): + return self + def __next__(self): + return next(self._iter).upper() + + ''' + if PY3: + return cls + else: + cls.next = cls.__next__ + del cls.__next__ + return cls + +if PY3: + get_next = lambda x: x.next +else: + get_next = lambda x: x.__next__ + + +def encode_filename(filename): + if PY3: + return filename + else: + if isinstance(filename, unicode): + return filename.encode('utf-8') + return filename + + +def is_new_style(cls): + """ + Python 2.7 has both new-style and old-style classes. Old-style classes can + be pesky in some circumstances, such as when using inheritance. Use this + function to test for whether a class is new-style. (Python 3 only has + new-style classes.) + """ + return hasattr(cls, '__class__') and ('__dict__' in dir(cls) + or hasattr(cls, '__slots__')) + +# The native platform string and bytes types. Useful because ``str`` and +# ``bytes`` are redefined on Py2 by ``from future.builtins import *``. +native_str = str +native_bytes = bytes + + +def istext(obj): + """ + Deprecated. Use:: + >>> isinstance(obj, str) + after this import: + >>> from future.builtins import str + """ + return isinstance(obj, type(u'')) + + +def isbytes(obj): + """ + Deprecated. Use:: + >>> isinstance(obj, bytes) + after this import: + >>> from future.builtins import bytes + """ + return isinstance(obj, type(b'')) + + +def isnewbytes(obj): + """ + Equivalent to the result of ``isinstance(obj, newbytes)`` were + ``__instancecheck__`` not overridden on the newbytes subclass. 
In + other words, it is REALLY a newbytes instance, not a Py2 native str + object? + """ + # TODO: generalize this so that it works with subclasses of newbytes + # Import is here to avoid circular imports: + from future.types.newbytes import newbytes + return type(obj) == newbytes + + +def isint(obj): + """ + Deprecated. Tests whether an object is a Py3 ``int`` or either a Py2 ``int`` or + ``long``. + + Instead of using this function, you can use: + + >>> from future.builtins import int + >>> isinstance(obj, int) + + The following idiom is equivalent: + + >>> from numbers import Integral + >>> isinstance(obj, Integral) + """ + + return isinstance(obj, numbers.Integral) + + +def native(obj): + """ + On Py3, this is a no-op: native(obj) -> obj + + On Py2, returns the corresponding native Py2 types that are + superclasses for backported objects from Py3: + + >>> from builtins import str, bytes, int + + >>> native(str(u'ABC')) + u'ABC' + >>> type(native(str(u'ABC'))) + unicode + + >>> native(bytes(b'ABC')) + b'ABC' + >>> type(native(bytes(b'ABC'))) + bytes + + >>> native(int(10**20)) + 100000000000000000000L + >>> type(native(int(10**20))) + long + + Existing native types on Py2 will be returned unchanged: + + >>> type(native(u'ABC')) + unicode + """ + if hasattr(obj, '__native__'): + return obj.__native__() + else: + return obj + + +# Implementation of exec_ is from ``six``: +if PY3: + import builtins + exec_ = getattr(builtins, "exec") +else: + def exec_(code, globs=None, locs=None): + """Execute code in a namespace.""" + if globs is None: + frame = sys._getframe(1) + globs = frame.f_globals + if locs is None: + locs = frame.f_locals + del frame + elif locs is None: + locs = globs + exec("""exec code in globs, locs""") + + +# Defined here for backward compatibility: +def old_div(a, b): + """ + DEPRECATED: import ``old_div`` from ``past.utils`` instead. + + Equivalent to ``a / b`` on Python 2 without ``from __future__ import + division``. 
+ + TODO: generalize this to other objects (like arrays etc.) + """ + if isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral): + return a // b + else: + return a / b + + +def as_native_str(encoding='utf-8'): + ''' + A decorator to turn a function or method call that returns text, i.e. + unicode, into one that returns a native platform str. + + Use it as a decorator like this:: + + from __future__ import unicode_literals + + class MyClass(object): + @as_native_str(encoding='ascii') + def __repr__(self): + return next(self._iter).upper() + ''' + if PY3: + return lambda f: f + else: + def encoder(f): + @functools.wraps(f) + def wrapper(*args, **kwargs): + return f(*args, **kwargs).encode(encoding=encoding) + return wrapper + return encoder + +# listvalues and listitems definitions from Nick Coghlan's (withdrawn) +# PEP 496: +try: + dict.iteritems +except AttributeError: + # Python 3 + def listvalues(d): + return list(d.values()) + def listitems(d): + return list(d.items()) +else: + # Python 2 + def listvalues(d): + return d.values() + def listitems(d): + return d.items() + +if PY3: + def ensure_new_type(obj): + return obj +else: + def ensure_new_type(obj): + from future.types.newbytes import newbytes + from future.types.newstr import newstr + from future.types.newint import newint + from future.types.newdict import newdict + + native_type = type(native(obj)) + + # Upcast only if the type is already a native (non-future) type + if issubclass(native_type, type(obj)): + # Upcast + if native_type == str: # i.e. 
Py2 8-bit str + return newbytes(obj) + elif native_type == unicode: + return newstr(obj) + elif native_type == int: + return newint(obj) + elif native_type == long: + return newint(obj) + elif native_type == dict: + return newdict(obj) + else: + return obj + else: + # Already a new type + assert type(obj) in [newbytes, newstr] + return obj + + +__all__ = ['PY2', 'PY26', 'PY3', 'PYPY', + 'as_native_str', 'bind_method', 'bord', 'bstr', + 'bytes_to_native_str', 'encode_filename', 'ensure_new_type', + 'exec_', 'get_next', 'getexception', 'implements_iterator', + 'is_new_style', 'isbytes', 'isidentifier', 'isint', + 'isnewbytes', 'istext', 'iteritems', 'iterkeys', 'itervalues', + 'lfilter', 'listitems', 'listvalues', 'lmap', 'lrange', + 'lzip', 'native', 'native_bytes', 'native_str', + 'native_str_to_bytes', 'old_div', + 'python_2_unicode_compatible', 'raise_', + 'raise_with_traceback', 'reraise', 'text_to_native_str', + 'tobytes', 'viewitems', 'viewkeys', 'viewvalues', + 'with_metaclass' + ] diff --git a/.install/.kodi/addons/script.module.future/libs/future/utils/surrogateescape.py b/.install/.kodi/addons/script.module.future/libs/future/utils/surrogateescape.py new file mode 100644 index 000000000..0dcc9fa6e --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/future/utils/surrogateescape.py @@ -0,0 +1,198 @@ +""" +This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error +handler of Python 3. 
+ +Source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/misc +""" + +# This code is released under the Python license and the BSD 2-clause license + +import codecs +import sys + +from future import utils + + +FS_ERRORS = 'surrogateescape' + +# # -- Python 2/3 compatibility ------------------------------------- +# FS_ERRORS = 'my_surrogateescape' + +def u(text): + if utils.PY3: + return text + else: + return text.decode('unicode_escape') + +def b(data): + if utils.PY3: + return data.encode('latin1') + else: + return data + +if utils.PY3: + _unichr = chr + bytes_chr = lambda code: bytes((code,)) +else: + _unichr = unichr + bytes_chr = chr + +def surrogateescape_handler(exc): + """ + Pure Python implementation of the PEP 383: the "surrogateescape" error + handler of Python 3. Undecodable bytes will be replaced by a Unicode + character U+DCxx on decoding, and these are translated into the + original bytes on encoding. + """ + mystring = exc.object[exc.start:exc.end] + + try: + if isinstance(exc, UnicodeDecodeError): + # mystring is a byte-string in this case + decoded = replace_surrogate_decode(mystring) + elif isinstance(exc, UnicodeEncodeError): + # In the case of u'\udcc3'.encode('ascii', + # 'this_surrogateescape_handler'), both Python 2.x and 3.x raise an + # exception anyway after this function is called, even though I think + # it's doing what it should. It seems that the strict encoder is called + # to encode the unicode string that this function returns ... + decoded = replace_surrogate_encode(mystring) + else: + raise exc + except NotASurrogateError: + raise exc + return (decoded, exc.end) + + +class NotASurrogateError(Exception): + pass + + +def replace_surrogate_encode(mystring): + """ + Returns a (unicode) string, not the more logical bytes, because the codecs + register_error functionality expects this. 
+ """ + decoded = [] + for ch in mystring: + # if utils.PY3: + # code = ch + # else: + code = ord(ch) + + # The following magic comes from Py3.3's Python/codecs.c file: + if not 0xD800 <= code <= 0xDCFF: + # Not a surrogate. Fail with the original exception. + raise NotASurrogateError + # mybytes = [0xe0 | (code >> 12), + # 0x80 | ((code >> 6) & 0x3f), + # 0x80 | (code & 0x3f)] + # Is this a good idea? + if 0xDC00 <= code <= 0xDC7F: + decoded.append(_unichr(code - 0xDC00)) + elif code <= 0xDCFF: + decoded.append(_unichr(code - 0xDC00)) + else: + raise NotASurrogateError + return str().join(decoded) + + +def replace_surrogate_decode(mybytes): + """ + Returns a (unicode) string + """ + decoded = [] + for ch in mybytes: + # We may be parsing newbytes (in which case ch is an int) or a native + # str on Py2 + if isinstance(ch, int): + code = ch + else: + code = ord(ch) + if 0x80 <= code <= 0xFF: + decoded.append(_unichr(0xDC00 + code)) + elif code <= 0x7F: + decoded.append(_unichr(code)) + else: + # # It may be a bad byte + # # Try swallowing it. + # continue + # print("RAISE!") + raise NotASurrogateError + return str().join(decoded) + + +def encodefilename(fn): + if FS_ENCODING == 'ascii': + # ASCII encoder of Python 2 expects that the error handler returns a + # Unicode string encodable to ASCII, whereas our surrogateescape error + # handler has to return bytes in 0x80-0xFF range. 
+ encoded = [] + for index, ch in enumerate(fn): + code = ord(ch) + if code < 128: + ch = bytes_chr(code) + elif 0xDC80 <= code <= 0xDCFF: + ch = bytes_chr(code - 0xDC00) + else: + raise UnicodeEncodeError(FS_ENCODING, + fn, index, index+1, + 'ordinal not in range(128)') + encoded.append(ch) + return bytes().join(encoded) + elif FS_ENCODING == 'utf-8': + # UTF-8 encoder of Python 2 encodes surrogates, so U+DC80-U+DCFF + # doesn't go through our error handler + encoded = [] + for index, ch in enumerate(fn): + code = ord(ch) + if 0xD800 <= code <= 0xDFFF: + if 0xDC80 <= code <= 0xDCFF: + ch = bytes_chr(code - 0xDC00) + encoded.append(ch) + else: + raise UnicodeEncodeError( + FS_ENCODING, + fn, index, index+1, 'surrogates not allowed') + else: + ch_utf8 = ch.encode('utf-8') + encoded.append(ch_utf8) + return bytes().join(encoded) + else: + return fn.encode(FS_ENCODING, FS_ERRORS) + +def decodefilename(fn): + return fn.decode(FS_ENCODING, FS_ERRORS) + +FS_ENCODING = 'ascii'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') +# FS_ENCODING = 'cp932'; fn = b('[abc\x81\x00]'); encoded = u('[abc\udc81\x00]') +# FS_ENCODING = 'UTF-8'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') + + +# normalize the filesystem encoding name. +# For example, we expect "utf-8", not "UTF8". 
+FS_ENCODING = codecs.lookup(FS_ENCODING).name + + +def register_surrogateescape(): + """ + Registers the surrogateescape error handler on Python 2 (only) + """ + if utils.PY3: + return + try: + codecs.lookup_error(FS_ERRORS) + except LookupError: + codecs.register_error(FS_ERRORS, surrogateescape_handler) + + +if __name__ == '__main__': + pass + # # Tests: + # register_surrogateescape() + + # b = decodefilename(fn) + # assert b == encoded, "%r != %r" % (b, encoded) + # c = encodefilename(b) + # assert c == fn, '%r != %r' % (c, fn) + # # print("ok") diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/__init__.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/__init__.py new file mode 100644 index 000000000..4cb1cbcd6 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/__init__.py @@ -0,0 +1 @@ +# empty to make this a package diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixer_util.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixer_util.py new file mode 100644 index 000000000..48e4689db --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixer_util.py @@ -0,0 +1,520 @@ +""" +Utility functions from 2to3, 3to2 and python-modernize (and some home-grown +ones). 
+ +Licences: +2to3: PSF License v2 +3to2: Apache Software License (from 3to2/setup.py) +python-modernize licence: BSD (from python-modernize/LICENSE) +""" + +from lib2to3.fixer_util import (FromImport, Newline, is_import, + find_root, does_tree_import, Comma) +from lib2to3.pytree import Leaf, Node +from lib2to3.pygram import python_symbols as syms, python_grammar +from lib2to3.pygram import token +from lib2to3.fixer_util import (Node, Call, Name, syms, Comma, Number) +import re + + +def canonical_fix_name(fix, avail_fixes): + """ + Examples: + >>> canonical_fix_name('fix_wrap_text_literals') + 'libfuturize.fixes.fix_wrap_text_literals' + >>> canonical_fix_name('wrap_text_literals') + 'libfuturize.fixes.fix_wrap_text_literals' + >>> canonical_fix_name('wrap_te') + ValueError("unknown fixer name") + >>> canonical_fix_name('wrap') + ValueError("ambiguous fixer name") + """ + if ".fix_" in fix: + return fix + else: + if fix.startswith('fix_'): + fix = fix[4:] + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + raise ValueError("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found)) + elif len(found) == 0: + raise ValueError("Unknown fixer. 
Use --list-fixes or -l for a list.") + return found[0] + + + +## These functions are from 3to2 by Joe Amenta: + +def Star(prefix=None): + return Leaf(token.STAR, u'*', prefix=prefix) + +def DoubleStar(prefix=None): + return Leaf(token.DOUBLESTAR, u'**', prefix=prefix) + +def Minus(prefix=None): + return Leaf(token.MINUS, u'-', prefix=prefix) + +def commatize(leafs): + """ + Accepts/turns: (Name, Name, ..., Name, Name) + Returns/into: (Name, Comma, Name, Comma, ..., Name, Comma, Name) + """ + new_leafs = [] + for leaf in leafs: + new_leafs.append(leaf) + new_leafs.append(Comma()) + del new_leafs[-1] + return new_leafs + +def indentation(node): + """ + Returns the indentation for this node + Iff a node is in a suite, then it has indentation. + """ + while node.parent is not None and node.parent.type != syms.suite: + node = node.parent + if node.parent is None: + return u"" + # The first three children of a suite are NEWLINE, INDENT, (some other node) + # INDENT.value contains the indentation for this suite + # anything after (some other node) has the indentation as its prefix. + if node.type == token.INDENT: + return node.value + elif node.prev_sibling is not None and node.prev_sibling.type == token.INDENT: + return node.prev_sibling.value + elif node.prev_sibling is None: + return u"" + else: + return node.prefix + +def indentation_step(node): + """ + Dirty little trick to get the difference between each indentation level + Implemented by finding the shortest indentation string + (technically, the "least" of all of the indentation strings, but + tabs and spaces mixed won't get this far, so those are synonymous.) + """ + r = find_root(node) + # Collect all indentations into one set. 
+ all_indents = set(i.value for i in r.pre_order() if i.type == token.INDENT) + if not all_indents: + # nothing is indented anywhere, so we get to pick what we want + return u" " # four spaces is a popular convention + else: + return min(all_indents) + +def suitify(parent): + """ + Turn the stuff after the first colon in parent's children + into a suite, if it wasn't already + """ + for node in parent.children: + if node.type == syms.suite: + # already in the prefered format, do nothing + return + + # One-liners have no suite node, we have to fake one up + for i, node in enumerate(parent.children): + if node.type == token.COLON: + break + else: + raise ValueError(u"No class suite and no ':'!") + # Move everything into a suite node + suite = Node(syms.suite, [Newline(), Leaf(token.INDENT, indentation(node) + indentation_step(node))]) + one_node = parent.children[i+1] + one_node.remove() + one_node.prefix = u'' + suite.append_child(one_node) + parent.append_child(suite) + +def NameImport(package, as_name=None, prefix=None): + """ + Accepts a package (Name node), name to import it as (string), and + optional prefix and returns a node: + import [as ] + """ + if prefix is None: + prefix = u"" + children = [Name(u"import", prefix=prefix), package] + if as_name is not None: + children.extend([Name(u"as", prefix=u" "), + Name(as_name, prefix=u" ")]) + return Node(syms.import_name, children) + +_compound_stmts = (syms.if_stmt, syms.while_stmt, syms.for_stmt, syms.try_stmt, syms.with_stmt) +_import_stmts = (syms.import_name, syms.import_from) + +def import_binding_scope(node): + """ + Generator yields all nodes for which a node (an import_stmt) has scope + The purpose of this is for a call to _find() on each of them + """ + # import_name / import_from are small_stmts + assert node.type in _import_stmts + test = node.next_sibling + # A small_stmt can only be followed by a SEMI or a NEWLINE. 
+ while test.type == token.SEMI: + nxt = test.next_sibling + # A SEMI can only be followed by a small_stmt or a NEWLINE + if nxt.type == token.NEWLINE: + break + else: + yield nxt + # A small_stmt can only be followed by either a SEMI or a NEWLINE + test = nxt.next_sibling + # Covered all subsequent small_stmts after the import_stmt + # Now to cover all subsequent stmts after the parent simple_stmt + parent = node.parent + assert parent.type == syms.simple_stmt + test = parent.next_sibling + while test is not None: + # Yes, this will yield NEWLINE and DEDENT. Deal with it. + yield test + test = test.next_sibling + + context = parent.parent + # Recursively yield nodes following imports inside of a if/while/for/try/with statement + if context.type in _compound_stmts: + # import is in a one-liner + c = context + while c.next_sibling is not None: + yield c.next_sibling + c = c.next_sibling + context = context.parent + + # Can't chain one-liners on one line, so that takes care of that. + + p = context.parent + if p is None: + return + + # in a multi-line suite + + while p.type in _compound_stmts: + + if context.type == syms.suite: + yield context + + context = context.next_sibling + + if context is None: + context = p.parent + p = context.parent + if p is None: + break + +def ImportAsName(name, as_name, prefix=None): + new_name = Name(name) + new_as = Name(u"as", prefix=u" ") + new_as_name = Name(as_name, prefix=u" ") + new_node = Node(syms.import_as_name, [new_name, new_as, new_as_name]) + if prefix is not None: + new_node.prefix = prefix + return new_node + + +def is_docstring(node): + """ + Returns True if the node appears to be a docstring + """ + return (node.type == syms.simple_stmt and + len(node.children) > 0 and node.children[0].type == token.STRING) + + +def future_import(feature, node): + """ + This seems to work + """ + root = find_root(node) + + if does_tree_import(u"__future__", feature, node): + return + + # Look for a shebang or encoding line + 
shebang_encoding_idx = None + + for idx, node in enumerate(root.children): + # Is it a shebang or encoding line? + if is_shebang_comment(node) or is_encoding_comment(node): + shebang_encoding_idx = idx + if is_docstring(node): + # skip over docstring + continue + names = check_future_import(node) + if not names: + # not a future statement; need to insert before this + break + if feature in names: + # already imported + return + + import_ = FromImport(u'__future__', [Leaf(token.NAME, feature, prefix=" ")]) + if shebang_encoding_idx == 0 and idx == 0: + # If this __future__ import would go on the first line, + # detach the shebang / encoding prefix from the current first line. + # and attach it to our new __future__ import node. + import_.prefix = root.children[0].prefix + root.children[0].prefix = u'' + # End the __future__ import line with a newline and add a blank line + # afterwards: + children = [import_ , Newline()] + root.insert_child(idx, Node(syms.simple_stmt, children)) + + +def future_import2(feature, node): + """ + An alternative to future_import() which might not work ... 
+ """ + root = find_root(node) + + if does_tree_import(u"__future__", feature, node): + return + + insert_pos = 0 + for idx, node in enumerate(root.children): + if node.type == syms.simple_stmt and node.children and \ + node.children[0].type == token.STRING: + insert_pos = idx + 1 + break + + for thing_after in root.children[insert_pos:]: + if thing_after.type == token.NEWLINE: + insert_pos += 1 + continue + + prefix = thing_after.prefix + thing_after.prefix = u"" + break + else: + prefix = u"" + + import_ = FromImport(u"__future__", [Leaf(token.NAME, feature, prefix=u" ")]) + + children = [import_, Newline()] + root.insert_child(insert_pos, Node(syms.simple_stmt, children, prefix=prefix)) + +def parse_args(arglist, scheme): + u""" + Parse a list of arguments into a dict + """ + arglist = [i for i in arglist if i.type != token.COMMA] + + ret_mapping = dict([(k, None) for k in scheme]) + + for i, arg in enumerate(arglist): + if arg.type == syms.argument and arg.children[1].type == token.EQUAL: + # argument < NAME '=' any > + slot = arg.children[0].value + ret_mapping[slot] = arg.children[2] + else: + slot = scheme[i] + ret_mapping[slot] = arg + + return ret_mapping + + +# def is_import_from(node): +# """Returns true if the node is a statement "from ... import ..." +# """ +# return node.type == syms.import_from + + +def is_import_stmt(node): + return (node.type == syms.simple_stmt and node.children and + is_import(node.children[0])) + + +def touch_import_top(package, name_to_import, node): + """Works like `does_tree_import` but adds an import statement at the + top if it was not imported (but below any __future__ imports) and below any + comments such as shebang lines). + + Based on lib2to3.fixer_util.touch_import() + + Calling this multiple times adds the imports in reverse order. + + Also adds "standard_library.install_aliases()" after "from future import + standard_library". This should probably be factored into another function. 
+ """ + + root = find_root(node) + + if does_tree_import(package, name_to_import, root): + return + + # Ideally, we would look for whether futurize --all-imports has been run, + # as indicated by the presence of ``from builtins import (ascii, ..., + # zip)`` -- and, if it has, we wouldn't import the name again. + + # Look for __future__ imports and insert below them + found = False + for name in ['absolute_import', 'division', 'print_function', + 'unicode_literals']: + if does_tree_import('__future__', name, root): + found = True + break + if found: + # At least one __future__ import. We want to loop until we've seen them + # all. + start, end = None, None + for idx, node in enumerate(root.children): + if check_future_import(node): + start = idx + # Start looping + idx2 = start + while node: + node = node.next_sibling + idx2 += 1 + if not check_future_import(node): + end = idx2 + break + break + assert start is not None + assert end is not None + insert_pos = end + else: + # No __future__ imports. + # We look for a docstring and insert the new node below that. If no docstring + # exists, just insert the node at the top. + for idx, node in enumerate(root.children): + if node.type != syms.simple_stmt: + break + if not is_docstring(node): + # This is the usual case. 
+ break + insert_pos = idx + + if package is None: + import_ = Node(syms.import_name, [ + Leaf(token.NAME, u"import"), + Leaf(token.NAME, name_to_import, prefix=u" ") + ]) + else: + import_ = FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")]) + if name_to_import == u'standard_library': + # Add: + # standard_library.install_aliases() + # after: + # from future import standard_library + install_hooks = Node(syms.simple_stmt, + [Node(syms.power, + [Leaf(token.NAME, u'standard_library'), + Node(syms.trailer, [Leaf(token.DOT, u'.'), + Leaf(token.NAME, u'install_aliases')]), + Node(syms.trailer, [Leaf(token.LPAR, u'('), + Leaf(token.RPAR, u')')]) + ]) + ] + ) + children_hooks = [install_hooks, Newline()] + else: + children_hooks = [] + + # FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")]) + + children_import = [import_, Newline()] + old_prefix = root.children[insert_pos].prefix + root.children[insert_pos].prefix = u'' + root.insert_child(insert_pos, Node(syms.simple_stmt, children_import, prefix=old_prefix)) + if len(children_hooks) > 0: + root.insert_child(insert_pos + 1, Node(syms.simple_stmt, children_hooks)) + + +## The following functions are from python-modernize by Armin Ronacher: +# (a little edited). 
+ +def check_future_import(node): + """If this is a future import, return set of symbols that are imported, + else return None.""" + # node should be the import statement here + savenode = node + if not (node.type == syms.simple_stmt and node.children): + return set() + node = node.children[0] + # now node is the import_from node + if not (node.type == syms.import_from and + # node.type == token.NAME and # seems to break it + hasattr(node.children[1], 'value') and + node.children[1].value == u'__future__'): + return set() + if node.children[3].type == token.LPAR: + node = node.children[4] + else: + node = node.children[3] + # now node is the import_as_name[s] + # print(python_grammar.number2symbol[node.type]) # breaks sometimes + if node.type == syms.import_as_names: + result = set() + for n in node.children: + if n.type == token.NAME: + result.add(n.value) + elif n.type == syms.import_as_name: + n = n.children[0] + assert n.type == token.NAME + result.add(n.value) + return result + elif node.type == syms.import_as_name: + node = node.children[0] + assert node.type == token.NAME + return set([node.value]) + elif node.type == token.NAME: + return set([node.value]) + else: + # TODO: handle brackets like this: + # from __future__ import (absolute_import, division) + assert False, "strange import: %s" % savenode + + +SHEBANG_REGEX = r'^#!.*python' +ENCODING_REGEX = r"^#.*coding[:=]\s*([-\w.]+)" + + +def is_shebang_comment(node): + """ + Comments are prefixes for Leaf nodes. Returns whether the given node has a + prefix that looks like a shebang line or an encoding line: + + #!/usr/bin/env python + #!/usr/bin/python3 + """ + return bool(re.match(SHEBANG_REGEX, node.prefix)) + + +def is_encoding_comment(node): + """ + Comments are prefixes for Leaf nodes. 
Returns whether the given node has a + prefix that looks like an encoding line: + + # coding: utf-8 + # encoding: utf-8 + # -*- coding: -*- + # vim: set fileencoding= : + """ + return bool(re.match(ENCODING_REGEX, node.prefix)) + + +def wrap_in_fn_call(fn_name, args, prefix=None): + """ + Example: + >>> wrap_in_fn_call("oldstr", (arg,)) + oldstr(arg) + + >>> wrap_in_fn_call("olddiv", (arg1, arg2)) + olddiv(arg1, arg2) + + >>> wrap_in_fn_call("olddiv", [arg1, comma, arg2, comma, arg3]) + olddiv(arg1, arg2, arg3) + """ + assert len(args) > 0 + if len(args) == 2: + expr1, expr2 = args + newargs = [expr1, Comma(), expr2] + else: + newargs = args + return Call(Name(fn_name), newargs, prefix=prefix) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/__init__.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/__init__.py new file mode 100644 index 000000000..7de304da7 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/__init__.py @@ -0,0 +1,96 @@ +import sys +from lib2to3 import refactor + +# The following fixers are "safe": they convert Python 2 code to more +# modern Python 2 code. They should be uncontroversial to apply to most +# projects that are happy to drop support for Py2.5 and below. Applying +# them first will reduce the size of the patch set for the real porting. +lib2to3_fix_names_stage1 = set([ + 'lib2to3.fixes.fix_apply', + 'lib2to3.fixes.fix_except', + 'lib2to3.fixes.fix_exec', + 'lib2to3.fixes.fix_exitfunc', + 'lib2to3.fixes.fix_funcattrs', + 'lib2to3.fixes.fix_has_key', + 'lib2to3.fixes.fix_idioms', + # 'lib2to3.fixes.fix_import', # makes any implicit relative imports explicit. (Use with ``from __future__ import absolute_import) + 'lib2to3.fixes.fix_intern', + 'lib2to3.fixes.fix_isinstance', + 'lib2to3.fixes.fix_methodattrs', + 'lib2to3.fixes.fix_ne', + # 'lib2to3.fixes.fix_next', # would replace ``next`` method names + # with ``__next__``. 
+ 'lib2to3.fixes.fix_numliterals', # turns 1L into 1, 0755 into 0o755 + 'lib2to3.fixes.fix_paren', + # 'lib2to3.fixes.fix_print', # see the libfuturize fixer that also + # adds ``from __future__ import print_function`` + # 'lib2to3.fixes.fix_raise', # uses incompatible with_traceback() method on exceptions + 'lib2to3.fixes.fix_reduce', # reduce is available in functools on Py2.6/Py2.7 + 'lib2to3.fixes.fix_renames', # sys.maxint -> sys.maxsize + # 'lib2to3.fixes.fix_set_literal', # this is unnecessary and breaks Py2.6 support + 'lib2to3.fixes.fix_repr', + 'lib2to3.fixes.fix_standarderror', + 'lib2to3.fixes.fix_sys_exc', + 'lib2to3.fixes.fix_throw', + 'lib2to3.fixes.fix_tuple_params', + 'lib2to3.fixes.fix_types', + 'lib2to3.fixes.fix_ws_comma', # can perhaps decrease readability: see issue #58 + 'lib2to3.fixes.fix_xreadlines', +]) + +# The following fixers add a dependency on the ``future`` package on order to +# support Python 2: +lib2to3_fix_names_stage2 = set([ + # 'lib2to3.fixes.fix_buffer', # perhaps not safe. Test this. + # 'lib2to3.fixes.fix_callable', # not needed in Py3.2+ + 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. and move to stage2 + # 'lib2to3.fixes.fix_execfile', # some problems: see issue #37. + # We use a custom fixer instead (see below) + # 'lib2to3.fixes.fix_future', # we don't want to remove __future__ imports + 'lib2to3.fixes.fix_getcwdu', + # 'lib2to3.fixes.fix_imports', # called by libfuturize.fixes.fix_future_standard_library + # 'lib2to3.fixes.fix_imports2', # we don't handle this yet (dbm) + 'lib2to3.fixes.fix_input', + 'lib2to3.fixes.fix_itertools', + 'lib2to3.fixes.fix_itertools_imports', + 'lib2to3.fixes.fix_filter', + 'lib2to3.fixes.fix_long', + 'lib2to3.fixes.fix_map', + # 'lib2to3.fixes.fix_metaclass', # causes SyntaxError in Py2! 
Use the one from ``six`` instead + 'lib2to3.fixes.fix_next', + 'lib2to3.fixes.fix_nonzero', # TODO: cause this to import ``object`` and/or add a decorator for mapping __bool__ to __nonzero__ + 'lib2to3.fixes.fix_operator', # we will need support for this by e.g. extending the Py2 operator module to provide those functions in Py3 + 'lib2to3.fixes.fix_raw_input', + # 'lib2to3.fixes.fix_unicode', # strips off the u'' prefix, which removes a potentially helpful source of information for disambiguating unicode/byte strings + # 'lib2to3.fixes.fix_urllib', # included in libfuturize.fix_future_standard_library_urllib + # 'lib2to3.fixes.fix_xrange', # custom one because of a bug with Py3.3's lib2to3 + 'lib2to3.fixes.fix_zip', +]) + +libfuturize_fix_names_stage1 = set([ + 'libfuturize.fixes.fix_absolute_import', + 'libfuturize.fixes.fix_next_call', # obj.next() -> next(obj). Unlike + # lib2to3.fixes.fix_next, doesn't change + # the ``next`` method to ``__next__``. + 'libfuturize.fixes.fix_print_with_import', + 'libfuturize.fixes.fix_raise', + # 'libfuturize.fixes.fix_order___future__imports', # TODO: consolidate to a single line to simplify testing +]) + +libfuturize_fix_names_stage2 = set([ + 'libfuturize.fixes.fix_basestring', + # 'libfuturize.fixes.fix_add__future__imports_except_unicode_literals', # just in case + 'libfuturize.fixes.fix_cmp', + 'libfuturize.fixes.fix_division_safe', + 'libfuturize.fixes.fix_execfile', + 'libfuturize.fixes.fix_future_builtins', + 'libfuturize.fixes.fix_future_standard_library', + 'libfuturize.fixes.fix_future_standard_library_urllib', + 'libfuturize.fixes.fix_metaclass', + 'libpasteurize.fixes.fix_newstyle', + 'libfuturize.fixes.fix_object', + # 'libfuturize.fixes.fix_order___future__imports', # TODO: consolidate to a single line to simplify testing + 'libfuturize.fixes.fix_unicode_keep_u', + # 'libfuturize.fixes.fix_unicode_literals_import', + 'libfuturize.fixes.fix_xrange_with_import', # custom one because of a bug with Py3.3's lib2to3 
+]) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_UserDict.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_UserDict.py new file mode 100644 index 000000000..cb0cfacc6 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_UserDict.py @@ -0,0 +1,102 @@ +"""Fix UserDict. + +Incomplete! + +TODO: base this on fix_urllib perhaps? +""" + + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, attr_chain +from lib2to3.fixes.fix_imports import alternates, build_pattern, FixImports + +MAPPING = {'UserDict': 'collections', +} + +# def alternates(members): +# return "(" + "|".join(map(repr, members)) + ")" +# +# +# def build_pattern(mapping=MAPPING): +# mod_list = ' | '.join(["module_name='%s'" % key for key in mapping]) +# bare_names = alternates(mapping.keys()) +# +# yield """name_import=import_name< 'import' ((%s) | +# multiple_imports=dotted_as_names< any* (%s) any* >) > +# """ % (mod_list, mod_list) +# yield """import_from< 'from' (%s) 'import' ['('] +# ( any | import_as_name< any 'as' any > | +# import_as_names< any* >) [')'] > +# """ % mod_list +# yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > | +# multiple_imports=dotted_as_names< +# any* dotted_as_name< (%s) 'as' any > any* >) > +# """ % (mod_list, mod_list) +# +# # Find usages of module members in code e.g. thread.foo(bar) +# yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names + + +# class FixUserDict(fixer_base.BaseFix): +class FixUserdict(FixImports): + + BM_compatible = True + keep_line_order = True + # This is overridden in fix_imports2. + mapping = MAPPING + + # We want to run this fixer late, so fix_import doesn't try to make stdlib + # renames into relative imports. 
+ run_order = 6 + + def build_pattern(self): + return "|".join(build_pattern(self.mapping)) + + def compile_pattern(self): + # We override this, so MAPPING can be pragmatically altered and the + # changes will be reflected in PATTERN. + self.PATTERN = self.build_pattern() + super(FixImports, self).compile_pattern() + + # Don't match the node if it's within another match. + def match(self, node): + match = super(FixImports, self).match + results = match(node) + if results: + # Module usage could be in the trailer of an attribute lookup, so we + # might have nested matches when "bare_with_attr" is present. + if "bare_with_attr" not in results and \ + any(match(obj) for obj in attr_chain(node, "parent")): + return False + return results + return False + + def start_tree(self, tree, filename): + super(FixImports, self).start_tree(tree, filename) + self.replace = {} + + def transform(self, node, results): + import_mod = results.get("module_name") + if import_mod: + mod_name = import_mod.value + new_name = unicode(self.mapping[mod_name]) + import_mod.replace(Name(new_name, prefix=import_mod.prefix)) + if "name_import" in results: + # If it's not a "from x import x, y" or "import x as y" import, + # marked its usage to be replaced. + self.replace[mod_name] = new_name + if "multiple_imports" in results: + # This is a nasty hack to fix multiple imports on a line (e.g., + # "import StringIO, urlparse"). The problem is that I can't + # figure out an easy way to make a pattern recognize the keys of + # MAPPING randomly sprinkled in an import statement. + results = self.match(node) + if results: + self.transform(node, results) + else: + # Replace usage of the module. 
+ bare_name = results["bare_with_attr"][0] + new_name = self.replace.get(bare_name.value) + if new_name: + bare_name.replace(Name(new_name, prefix=bare_name.prefix)) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_absolute_import.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_absolute_import.py new file mode 100644 index 000000000..eab9c527d --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_absolute_import.py @@ -0,0 +1,91 @@ +""" +Fixer for import statements, with a __future__ import line. + +Based on lib2to3/fixes/fix_import.py, but extended slightly so it also +supports Cython modules. + +If spam is being imported from the local directory, this import: + from spam import eggs +becomes: + from __future__ import absolute_import + from .spam import eggs + +and this import: + import spam +becomes: + from __future__ import absolute_import + from . import spam +""" + +from os.path import dirname, join, exists, sep +from lib2to3.fixes.fix_import import FixImport +from lib2to3.fixer_util import FromImport, syms +from lib2to3.fixes.fix_import import traverse_imports + +from libfuturize.fixer_util import future_import + + +class FixAbsoluteImport(FixImport): + run_order = 9 + + def transform(self, node, results): + """ + Copied from FixImport.transform(), but with this line added in + any modules that had implicit relative imports changed: + + from __future__ import absolute_import" + """ + if self.skip: + return + imp = results['imp'] + + if node.type == syms.import_from: + # Some imps are top-level (eg: 'import ham') + # some are first level (eg: 'import ham.eggs') + # some are third level (eg: 'import ham.eggs as spam') + # Hence, the loop + while not hasattr(imp, 'value'): + imp = imp.children[0] + if self.probably_a_local_import(imp.value): + imp.value = u"." 
+ imp.value + imp.changed() + future_import(u"absolute_import", node) + else: + have_local = False + have_absolute = False + for mod_name in traverse_imports(imp): + if self.probably_a_local_import(mod_name): + have_local = True + else: + have_absolute = True + if have_absolute: + if have_local: + # We won't handle both sibling and absolute imports in the + # same statement at the moment. + self.warning(node, "absolute and local imports together") + return + + new = FromImport(u".", [imp]) + new.prefix = node.prefix + future_import(u"absolute_import", node) + return new + + def probably_a_local_import(self, imp_name): + """ + Like the corresponding method in the base class, but this also + supports Cython modules. + """ + if imp_name.startswith(u"."): + # Relative imports are certainly not local imports. + return False + imp_name = imp_name.split(u".", 1)[0] + base_path = dirname(self.filename) + base_path = join(base_path, imp_name) + # If there is no __init__.py next to the file its not in a package + # so can't be a relative import. + if not exists(join(dirname(base_path), "__init__.py")): + return False + for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]: + if exists(base_path + ext): + return True + return False diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py new file mode 100644 index 000000000..1d419a1c6 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py @@ -0,0 +1,26 @@ +""" +Fixer for adding: + + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + +This is "stage 1": hopefully uncontroversial changes. + +Stage 2 adds ``unicode_literals``. 
+""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixAddFutureImportsExceptUnicodeLiterals(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + + run_order = 9 + + def transform(self, node, results): + # Reverse order: + future_import(u"print_function", node) + future_import(u"division", node) + future_import(u"absolute_import", node) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_basestring.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_basestring.py new file mode 100644 index 000000000..5676d08fc --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_basestring.py @@ -0,0 +1,17 @@ +""" +Fixer that adds ``from past.builtins import basestring`` if there is a +reference to ``basestring`` +""" + +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +class FixBasestring(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = "'basestring'" + + def transform(self, node, results): + touch_import_top(u'past.builtins', 'basestring', node) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_bytes.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_bytes.py new file mode 100644 index 000000000..42021223a --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_bytes.py @@ -0,0 +1,24 @@ +"""Optional fixer that changes all unprefixed string literals "..." to b"...". + +br'abcd' is a SyntaxError on Python 2 but valid on Python 3. +ur'abcd' is a SyntaxError on Python 3 but valid on Python 2. 
+ +""" +from __future__ import unicode_literals + +import re +from lib2to3.pgen2 import token +from lib2to3 import fixer_base + +_literal_re = re.compile(r"[^bBuUrR]?[\'\"]") + +class FixBytes(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "STRING" + + def transform(self, node, results): + if node.type == token.STRING: + if _literal_re.match(node.value): + new = node.clone() + new.value = u'b' + new.value + return new diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_cmp.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_cmp.py new file mode 100644 index 000000000..762eb4b42 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_cmp.py @@ -0,0 +1,33 @@ +# coding: utf-8 +""" +Fixer for the cmp() function on Py2, which was removed in Py3. + +Adds this import line:: + + from past.builtins import cmp + +if cmp() is called in the code. +""" + +from __future__ import unicode_literals +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +expression = "name='cmp'" + + +class FixCmp(fixer_base.BaseFix): + BM_compatible = True + run_order = 9 + + PATTERN = """ + power< + ({0}) trailer< '(' args=[any] ')' > + rest=any* > + """.format(expression) + + def transform(self, node, results): + name = results["name"] + touch_import_top(u'past.builtins', name.value, node) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_division.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_division.py new file mode 100644 index 000000000..6975a52bb --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_division.py @@ -0,0 +1,12 @@ +""" +UNFINISHED +For the ``future`` package. 
+ +Adds this import line: + + from __future__ import division + +at the top so the code runs identically on Py3 and Py2.6/2.7 +""" + +from libpasteurize.fixes.fix_division import FixDivision diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_division_safe.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_division_safe.py new file mode 100644 index 000000000..7b0f3cbd7 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_division_safe.py @@ -0,0 +1,109 @@ +""" +For the ``future`` package. + +Adds this import line: + + from __future__ import division + +at the top and changes any old-style divisions to be calls to +past.utils.old_div so the code runs as before on Py2.6/2.7 and has the same +behaviour on Py3. + +If "from __future__ import division" is already in effect, this fixer does +nothing. +""" + +import re +import lib2to3.pytree as pytree +from lib2to3.fixer_util import Leaf, Node, Comma +from lib2to3 import fixer_base +from lib2to3.fixer_util import syms, does_tree_import +from libfuturize.fixer_util import (token, future_import, touch_import_top, + wrap_in_fn_call) + + +def match_division(node): + u""" + __future__.division redefines the meaning of a single slash for division, + so we match that and only that. + """ + slash = token.SLASH + return node.type == slash and not node.next_sibling.type == slash and \ + not node.prev_sibling.type == slash + +const_re = re.compile('^[0-9]*[.][0-9]*$') + +def is_floaty(node, div_idx): + return _is_floaty(node.children[0:div_idx]) or _is_floaty(node.children[div_idx+1:]) + + +def _is_floaty(expr): + if isinstance(expr, list): + expr = expr[0] + + if isinstance(expr, Leaf): + # If it's a leaf, let's see if it's a numeric constant containing a '.' 
+ return const_re.match(expr.value) + elif isinstance(expr, Node): + # If the expression is a node, let's see if it's a direct cast to float + if isinstance(expr.children[0], Leaf): + return expr.children[0].value == u'float' + return False + +def find_division(node): + for i, child in enumerate(node.children): + if match_division(child): + return i + return False + +def clone_div_operands(node, div_idx): + children = [] + for i, child in enumerate(node.children): + if i == div_idx: + children.append(Comma()) + else: + children.append(child.clone()) + + # Strip any leading space for the first number: + children[0].prefix = u'' + + return children + +class FixDivisionSafe(fixer_base.BaseFix): + # BM_compatible = True + run_order = 4 # this seems to be ignored? + + _accept_type = token.SLASH + + PATTERN = """ + term<(not('/') any)+ '/' ((not('/') any))> + """ + + def start_tree(self, tree, name): + """ + Skip this fixer if "__future__.division" is already imported. + """ + super(FixDivisionSafe, self).start_tree(tree, name) + self.skip = "division" in tree.future_features + + def match(self, node): + u""" + Since the tree needs to be fixed once and only once if and only if it + matches, we can start discarding matches after the first. 
+ """ + if node.type == self.syms.term: + div_idx = find_division(node) + if div_idx is not False: + # if expr1 or expr2 are obviously floats, we don't need to wrap in + # old_div, as the behavior of division between any number and a float + # should be the same in 2 or 3 + if not is_floaty(node, div_idx): + return clone_div_operands(node, div_idx) + return False + + def transform(self, node, results): + if self.skip: + return + future_import(u"division", node) + touch_import_top(u'past.utils', u'old_div', node) + return wrap_in_fn_call("old_div", results, prefix=node.prefix) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_execfile.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_execfile.py new file mode 100644 index 000000000..cfe9d8d0f --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_execfile.py @@ -0,0 +1,37 @@ +# coding: utf-8 +""" +Fixer for the execfile() function on Py2, which was removed in Py3. + +The Lib/lib2to3/fixes/fix_execfile.py module has some problems: see +python-future issue #37. This fixer merely imports execfile() from +past.builtins and leaves the code alone. + +Adds this import line:: + + from past.builtins import execfile + +for the function execfile() that was removed from Py3. 
+""" + +from __future__ import unicode_literals +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +expression = "name='execfile'" + + +class FixExecfile(fixer_base.BaseFix): + BM_compatible = True + run_order = 9 + + PATTERN = """ + power< + ({0}) trailer< '(' args=[any] ')' > + rest=any* > + """.format(expression) + + def transform(self, node, results): + name = results["name"] + touch_import_top(u'past.builtins', name.value, node) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_builtins.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_builtins.py new file mode 100644 index 000000000..eea6c6a1e --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_builtins.py @@ -0,0 +1,59 @@ +""" +For the ``future`` package. + +Adds this import line:: + + from builtins import XYZ + +for each of the functions XYZ that is used in the module. + +Adds these imports after any other imports (in an initial block of them). +""" + +from __future__ import unicode_literals + +from lib2to3 import fixer_base +from lib2to3.pygram import python_symbols as syms +from lib2to3.fixer_util import Name, Call, in_special_context + +from libfuturize.fixer_util import touch_import_top + +# All builtins are: +# from future.builtins.iterators import (filter, map, zip) +# from future.builtins.misc import (ascii, chr, hex, input, isinstance, oct, open, round, super) +# from future.types import (bytes, dict, int, range, str) +# We don't need isinstance any more. + +replaced_builtin_fns = '''filter map zip + ascii chr hex input next oct + bytes range str raw_input'''.split() + # This includes raw_input as a workaround for the + # lib2to3 fixer for raw_input on Py3 (only), allowing + # the correct import to be included. (Py3 seems to run + # the fixers the wrong way around, perhaps ignoring the + # run_order class attribute below ...) 
+ +expression = '|'.join(["name='{0}'".format(name) for name in replaced_builtin_fns]) + + +class FixFutureBuiltins(fixer_base.BaseFix): + BM_compatible = True + run_order = 7 + + # Currently we only match uses as a function. This doesn't match e.g.: + # if isinstance(s, str): + # ... + PATTERN = """ + power< + ({0}) trailer< '(' [arglist=any] ')' > + rest=any* > + | + power< + 'map' trailer< '(' [arglist=any] ')' > + > + """.format(expression) + + def transform(self, node, results): + name = results["name"] + touch_import_top(u'builtins', name.value, node) + # name.replace(Name(u"input", prefix=name.prefix)) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_standard_library.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_standard_library.py new file mode 100644 index 000000000..a1c3f3d4e --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_standard_library.py @@ -0,0 +1,24 @@ +""" +For the ``future`` package. + +Changes any imports needed to reflect the standard library reorganization. Also +Also adds these import lines: + + from future import standard_library + standard_library.install_aliases() + +after any __future__ imports but before any other imports. +""" + +from lib2to3.fixes.fix_imports import FixImports +from libfuturize.fixer_util import touch_import_top + + +class FixFutureStandardLibrary(FixImports): + run_order = 8 + + def transform(self, node, results): + result = super(FixFutureStandardLibrary, self).transform(node, results) + # TODO: add a blank line between any __future__ imports and this? 
+ touch_import_top(u'future', u'standard_library', node) + return result diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_standard_library_urllib.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_standard_library_urllib.py new file mode 100644 index 000000000..cf6738845 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_future_standard_library_urllib.py @@ -0,0 +1,28 @@ +""" +For the ``future`` package. + +A special fixer that ensures that these lines have been added:: + + from future import standard_library + standard_library.install_hooks() + +even if the only module imported was ``urllib``, in which case the regular fixer +wouldn't have added these lines. + +""" + +from lib2to3.fixes.fix_urllib import FixUrllib +from libfuturize.fixer_util import touch_import_top, find_root + + +class FixFutureStandardLibraryUrllib(FixUrllib): # not a subclass of FixImports + run_order = 8 + + def transform(self, node, results): + # transform_member() in lib2to3/fixes/fix_urllib.py breaks node so find_root(node) + # no longer works after the super() call below. So we find the root first: + root = find_root(node) + result = super(FixFutureStandardLibraryUrllib, self).transform(node, results) + # TODO: add a blank line between any __future__ imports and this? + touch_import_top(u'future', u'standard_library', root) + return result diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_metaclass.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_metaclass.py new file mode 100644 index 000000000..2ac41c972 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_metaclass.py @@ -0,0 +1,262 @@ +# coding: utf-8 +"""Fixer for __metaclass__ = X -> (future.utils.with_metaclass(X)) methods. 
+ + The various forms of classef (inherits nothing, inherits once, inherints + many) don't parse the same in the CST so we look at ALL classes for + a __metaclass__ and if we find one normalize the inherits to all be + an arglist. + + For one-liner classes ('class X: pass') there is no indent/dedent so + we normalize those into having a suite. + + Moving the __metaclass__ into the classdef can also cause the class + body to be empty so there is some special casing for that as well. + + This fixer also tries very hard to keep original indenting and spacing + in all those corner cases. +""" +# This is a derived work of Lib/lib2to3/fixes/fix_metaclass.py under the +# copyright of the Python Software Foundation, licensed under the Python +# Software Foundation License 2. +# +# Copyright notice: +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +# 2011, 2012, 2013 Python Software Foundation. All rights reserved. +# +# Full license text: http://docs.python.org/3.4/license.html + +# Author: Jack Diederich, Daniel Neuhäuser + +# Local imports +from lib2to3 import fixer_base +from lib2to3.pygram import token +from lib2to3.fixer_util import Name, syms, Node, Leaf, touch_import, Call, \ + String, Comma, parenthesize + + +def has_metaclass(parent): + """ we have to check the cls_node without changing it. 
+ There are two possiblities: + 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') + 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') + """ + for node in parent.children: + if node.type == syms.suite: + return has_metaclass(node) + elif node.type == syms.simple_stmt and node.children: + expr_node = node.children[0] + if expr_node.type == syms.expr_stmt and expr_node.children: + left_side = expr_node.children[0] + if isinstance(left_side, Leaf) and \ + left_side.value == '__metaclass__': + return True + return False + + +def fixup_parse_tree(cls_node): + """ one-line classes don't get a suite in the parse tree so we add + one to normalize the tree + """ + for node in cls_node.children: + if node.type == syms.suite: + # already in the preferred format, do nothing + return + + # !%@#! oneliners have no suite node, we have to fake one up + for i, node in enumerate(cls_node.children): + if node.type == token.COLON: + break + else: + raise ValueError("No class suite and no ':'!") + + # move everything into a suite node + suite = Node(syms.suite, []) + while cls_node.children[i+1:]: + move_node = cls_node.children[i+1] + suite.append_child(move_node.clone()) + move_node.remove() + cls_node.append_child(suite) + node = suite + + +def fixup_simple_stmt(parent, i, stmt_node): + """ if there is a semi-colon all the parts count as part of the same + simple_stmt. 
We just want the __metaclass__ part so we move + everything efter the semi-colon into its own simple_stmt node + """ + for semi_ind, node in enumerate(stmt_node.children): + if node.type == token.SEMI: # *sigh* + break + else: + return + + node.remove() # kill the semicolon + new_expr = Node(syms.expr_stmt, []) + new_stmt = Node(syms.simple_stmt, [new_expr]) + while stmt_node.children[semi_ind:]: + move_node = stmt_node.children[semi_ind] + new_expr.append_child(move_node.clone()) + move_node.remove() + parent.insert_child(i, new_stmt) + new_leaf1 = new_stmt.children[0].children[0] + old_leaf1 = stmt_node.children[0].children[0] + new_leaf1.prefix = old_leaf1.prefix + + +def remove_trailing_newline(node): + if node.children and node.children[-1].type == token.NEWLINE: + node.children[-1].remove() + + +def find_metas(cls_node): + # find the suite node (Mmm, sweet nodes) + for node in cls_node.children: + if node.type == syms.suite: + break + else: + raise ValueError("No class suite!") + + # look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ] + for i, simple_node in list(enumerate(node.children)): + if simple_node.type == syms.simple_stmt and simple_node.children: + expr_node = simple_node.children[0] + if expr_node.type == syms.expr_stmt and expr_node.children: + # Check if the expr_node is a simple assignment. + left_node = expr_node.children[0] + if isinstance(left_node, Leaf) and \ + left_node.value == u'__metaclass__': + # We found a assignment to __metaclass__. 
+ fixup_simple_stmt(node, i, simple_node) + remove_trailing_newline(simple_node) + yield (node, i, simple_node) + + +def fixup_indent(suite): + """ If an INDENT is followed by a thing with a prefix then nuke the prefix + Otherwise we get in trouble when removing __metaclass__ at suite start + """ + kids = suite.children[::-1] + # find the first indent + while kids: + node = kids.pop() + if node.type == token.INDENT: + break + + # find the first Leaf + while kids: + node = kids.pop() + if isinstance(node, Leaf) and node.type != token.DEDENT: + if node.prefix: + node.prefix = u'' + return + else: + kids.extend(node.children[::-1]) + + +class FixMetaclass(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + classdef + """ + + def transform(self, node, results): + if not has_metaclass(node): + return + + fixup_parse_tree(node) + + # find metaclasses, keep the last one + last_metaclass = None + for suite, i, stmt in find_metas(node): + last_metaclass = stmt + stmt.remove() + + text_type = node.children[0].type # always Leaf(nnn, 'class') + + # figure out what kind of classdef we have + if len(node.children) == 7: + # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite]) + # 0 1 2 3 4 5 6 + if node.children[3].type == syms.arglist: + arglist = node.children[3] + # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite]) + else: + parent = node.children[3].clone() + arglist = Node(syms.arglist, [parent]) + node.set_child(3, arglist) + elif len(node.children) == 6: + # Node(classdef, ['class', 'name', '(', ')', ':', suite]) + # 0 1 2 3 4 5 + arglist = Node(syms.arglist, []) + node.insert_child(3, arglist) + elif len(node.children) == 4: + # Node(classdef, ['class', 'name', ':', suite]) + # 0 1 2 3 + arglist = Node(syms.arglist, []) + node.insert_child(2, Leaf(token.RPAR, u')')) + node.insert_child(2, arglist) + node.insert_child(2, Leaf(token.LPAR, u'(')) + else: + raise ValueError("Unexpected class definition") + + # now stick the metaclass 
in the arglist + meta_txt = last_metaclass.children[0].children[0] + meta_txt.value = 'metaclass' + orig_meta_prefix = meta_txt.prefix + + # Was: touch_import(None, u'future.utils', node) + touch_import(u'future.utils', u'with_metaclass', node) + + metaclass = last_metaclass.children[0].children[2].clone() + metaclass.prefix = u'' + + arguments = [metaclass] + + if arglist.children: + if len(arglist.children) == 1: + base = arglist.children[0].clone() + base.prefix = u' ' + else: + # Unfortunately six.with_metaclass() only allows one base + # class, so we have to dynamically generate a base class if + # there is more than one. + bases = parenthesize(arglist.clone()) + bases.prefix = u' ' + base = Call(Name('type'), [ + String("'NewBase'"), + Comma(), + bases, + Comma(), + Node( + syms.atom, + [Leaf(token.LBRACE, u'{'), Leaf(token.RBRACE, u'}')], + prefix=u' ' + ) + ], prefix=u' ') + arguments.extend([Comma(), base]) + + arglist.replace(Call( + Name(u'with_metaclass', prefix=arglist.prefix), + arguments + )) + + fixup_indent(suite) + + # check for empty suite + if not suite.children: + # one-liner that was just __metaclass_ + suite.remove() + pass_leaf = Leaf(text_type, u'pass') + pass_leaf.prefix = orig_meta_prefix + node.append_child(pass_leaf) + node.append_child(Leaf(token.NEWLINE, u'\n')) + + elif len(suite.children) > 1 and \ + (suite.children[-2].type == token.INDENT and + suite.children[-1].type == token.DEDENT): + # there was only one line in the class body and it was __metaclass__ + pass_leaf = Leaf(text_type, u'pass') + suite.insert_child(-1, pass_leaf) + suite.insert_child(-1, Leaf(token.NEWLINE, u'\n')) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_next_call.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_next_call.py new file mode 100644 index 000000000..282f18522 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_next_call.py @@ -0,0 +1,104 @@ +""" 
+Based on fix_next.py by Collin Winter. + +Replaces it.next() -> next(it), per PEP 3114. + +Unlike fix_next.py, this fixer doesn't replace the name of a next method with __next__, +which would break Python 2 compatibility without further help from fixers in +stage 2. +""" + +# Local imports +from lib2to3.pgen2 import token +from lib2to3.pygram import python_symbols as syms +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, Call, find_binding + +bind_warning = "Calls to builtin next() possibly shadowed by global binding" + + +class FixNextCall(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > + | + power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > > + | + global=global_stmt< 'global' any* 'next' any* > + """ + + order = "pre" # Pre-order tree traversal + + def start_tree(self, tree, filename): + super(FixNextCall, self).start_tree(tree, filename) + + n = find_binding('next', tree) + if n: + self.warning(n, bind_warning) + self.shadowed_next = True + else: + self.shadowed_next = False + + def transform(self, node, results): + assert results + + base = results.get("base") + attr = results.get("attr") + name = results.get("name") + + if base: + if self.shadowed_next: + # Omit this: + # attr.replace(Name("__next__", prefix=attr.prefix)) + pass + else: + base = [n.clone() for n in base] + base[0].prefix = "" + node.replace(Call(Name("next", prefix=node.prefix), base)) + elif name: + # Omit this: + # n = Name("__next__", prefix=name.prefix) + # name.replace(n) + pass + elif attr: + # We don't do this transformation if we're assigning to "x.next". + # Unfortunately, it doesn't seem possible to do this in PATTERN, + # so it's being done here. 
+ if is_assign_target(node): + head = results["head"] + if "".join([str(n) for n in head]).strip() == '__builtin__': + self.warning(node, bind_warning) + return + # Omit this: + # attr.replace(Name("__next__")) + elif "global" in results: + self.warning(node, bind_warning) + self.shadowed_next = True + + +### The following functions help test if node is part of an assignment +### target. + +def is_assign_target(node): + assign = find_assign(node) + if assign is None: + return False + + for child in assign.children: + if child.type == token.EQUAL: + return False + elif is_subtree(child, node): + return True + return False + +def find_assign(node): + if node.type == syms.expr_stmt: + return node + if node.type == syms.simple_stmt or node.parent is None: + return None + return find_assign(node.parent) + +def is_subtree(root, node): + if root == node: + return True + return any(is_subtree(c, node) for c in root.children) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_object.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_object.py new file mode 100644 index 000000000..accf2c52e --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_object.py @@ -0,0 +1,17 @@ +""" +Fixer that adds ``from builtins import object`` if there is a line +like this: + class Foo(object): +""" + +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +class FixObject(fixer_base.BaseFix): + + PATTERN = u"classdef< 'class' NAME '(' name='object' ')' colon=':' any >" + + def transform(self, node, results): + touch_import_top(u'builtins', 'object', node) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_oldstr_wrap.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_oldstr_wrap.py new file mode 100644 index 000000000..ad58771d5 --- /dev/null +++ 
b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_oldstr_wrap.py @@ -0,0 +1,39 @@ +""" +For the ``future`` package. + +Adds this import line: + + from past.builtins import str as oldstr + +at the top and wraps any unadorned string literals 'abc' or explicit byte-string +literals b'abc' in oldstr() calls so the code has the same behaviour on Py3 as +on Py2.6/2.7. +""" + +from __future__ import unicode_literals +import re +from lib2to3 import fixer_base +from lib2to3.pgen2 import token +from lib2to3.fixer_util import syms +from libfuturize.fixer_util import (future_import, touch_import_top, + wrap_in_fn_call) + + +_literal_re = re.compile(r"[^uUrR]?[\'\"]") + + +class FixOldstrWrap(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "STRING" + + def transform(self, node, results): + if node.type == token.STRING: + touch_import_top(u'past.types', u'oldstr', node) + if _literal_re.match(node.value): + new = node.clone() + # Strip any leading space or comments: + # TODO: check: do we really want to do this? + new.prefix = u'' + new.value = u'b' + new.value + wrapped = wrap_in_fn_call("oldstr", [new], prefix=node.prefix) + return wrapped diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_order___future__imports.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_order___future__imports.py new file mode 100644 index 000000000..00d7ef606 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_order___future__imports.py @@ -0,0 +1,36 @@ +""" +UNFINISHED + +Fixer for turning multiple lines like these: + + from __future__ import division + from __future__ import absolute_import + from __future__ import print_function + +into a single line like this: + + from __future__ import (absolute_import, division, print_function) + +This helps with testing of ``futurize``. 
+""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixOrderFutureImports(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + + run_order = 10 + + # def match(self, node): + # """ + # Match only once per file + # """ + # if hasattr(node, 'type') and node.type == syms.file_input: + # return True + # return False + + def transform(self, node, results): + # TODO # write me + pass diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_print.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_print.py new file mode 100644 index 000000000..247b91b84 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_print.py @@ -0,0 +1,94 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for print. + +Change: + "print" into "print()" + "print ..." into "print(...)" + "print(...)" not changed + "print ... ," into "print(..., end=' ')" + "print >>x, ..." into "print(..., file=x)" + +No changes are applied if print_function is imported from __future__ + +""" + +# Local imports +from lib2to3 import patcomp, pytree, fixer_base +from lib2to3.pgen2 import token +from lib2to3.fixer_util import Name, Call, Comma, String +# from libmodernize import add_future + +parend_expr = patcomp.compile_pattern( + """atom< '(' [arith_expr|atom|power|term|STRING|NAME] ')' >""" + ) + + +class FixPrint(fixer_base.BaseFix): + + BM_compatible = True + + PATTERN = """ + simple_stmt< any* bare='print' any* > | print_stmt + """ + + def transform(self, node, results): + assert results + + bare_print = results.get("bare") + + if bare_print: + # Special-case print all by itself. + bare_print.replace(Call(Name(u"print"), [], + prefix=bare_print.prefix)) + # The "from __future__ import print_function"" declaration is added + # by the fix_print_with_import fixer, so we skip it here. 
+ # add_future(node, u'print_function') + return + assert node.children[0] == Name(u"print") + args = node.children[1:] + if len(args) == 1 and parend_expr.match(args[0]): + # We don't want to keep sticking parens around an + # already-parenthesised expression. + return + + sep = end = file = None + if args and args[-1] == Comma(): + args = args[:-1] + end = " " + if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, u">>"): + assert len(args) >= 2 + file = args[1].clone() + args = args[3:] # Strip a possible comma after the file expression + # Now synthesize a print(args, sep=..., end=..., file=...) node. + l_args = [arg.clone() for arg in args] + if l_args: + l_args[0].prefix = u"" + if sep is not None or end is not None or file is not None: + if sep is not None: + self.add_kwarg(l_args, u"sep", String(repr(sep))) + if end is not None: + self.add_kwarg(l_args, u"end", String(repr(end))) + if file is not None: + self.add_kwarg(l_args, u"file", file) + n_stmt = Call(Name(u"print"), l_args) + n_stmt.prefix = node.prefix + + # Note that there are corner cases where adding this future-import is + # incorrect, for example when the file also has a 'print ()' statement + # that was intended to print "()". 
+ # add_future(node, u'print_function') + return n_stmt + + def add_kwarg(self, l_nodes, s_kwd, n_expr): + # XXX All this prefix-setting may lose comments (though rarely) + n_expr.prefix = u"" + n_argument = pytree.Node(self.syms.argument, + (Name(s_kwd), + pytree.Leaf(token.EQUAL, u"="), + n_expr)) + if l_nodes: + l_nodes.append(Comma()) + n_argument.prefix = u" " + l_nodes.append(n_argument) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_print_with_import.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_print_with_import.py new file mode 100644 index 000000000..344904610 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_print_with_import.py @@ -0,0 +1,22 @@ +""" +For the ``future`` package. + +Turns any print statements into functions and adds this import line: + + from __future__ import print_function + +at the top to retain compatibility with Python 2.6+. +""" + +from libfuturize.fixes.fix_print import FixPrint +from libfuturize.fixer_util import future_import + +class FixPrintWithImport(FixPrint): + run_order = 7 + def transform(self, node, results): + # Add the __future__ import first. (Otherwise any shebang or encoding + # comment line attached as a prefix to the print statement will be + # copied twice and appear twice.) + future_import(u'print_function', node) + n_stmt = super(FixPrintWithImport, self).transform(node, results) + return n_stmt diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_raise.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_raise.py new file mode 100644 index 000000000..3e8323de2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_raise.py @@ -0,0 +1,73 @@ +"""Fixer for 'raise E, V' + +From Armin Ronacher's ``python-modernize``. 
+ +raise -> raise +raise E -> raise E +raise E, V -> raise E(V) + +raise (((E, E'), E''), E'''), V -> raise E(V) + + +CAVEATS: +1) "raise E, V" will be incorrectly translated if V is an exception + instance. The correct Python 3 idiom is + + raise E from V + + but since we can't detect instance-hood by syntax alone and since + any client code would have to be changed as well, we don't automate + this. +""" +# Author: Collin Winter, Armin Ronacher + +# Local imports +from lib2to3 import pytree, fixer_base +from lib2to3.pgen2 import token +from lib2to3.fixer_util import Name, Call, is_tuple + +class FixRaise(fixer_base.BaseFix): + + BM_compatible = True + PATTERN = """ + raise_stmt< 'raise' exc=any [',' val=any] > + """ + + def transform(self, node, results): + syms = self.syms + + exc = results["exc"].clone() + if exc.type == token.STRING: + msg = "Python 3 does not support string exceptions" + self.cannot_convert(node, msg) + return + + # Python 2 supports + # raise ((((E1, E2), E3), E4), E5), V + # as a synonym for + # raise E1, V + # Since Python 3 will not support this, we recurse down any tuple + # literals, always taking the first element. 
+ if is_tuple(exc): + while is_tuple(exc): + # exc.children[1:-1] is the unparenthesized tuple + # exc.children[1].children[0] is the first element of the tuple + exc = exc.children[1].children[0].clone() + exc.prefix = u" " + + if "val" not in results: + # One-argument raise + new = pytree.Node(syms.raise_stmt, [Name(u"raise"), exc]) + new.prefix = node.prefix + return new + + val = results["val"].clone() + if is_tuple(val): + args = [c.clone() for c in val.children[1:-1]] + else: + val.prefix = u"" + args = [val] + + return pytree.Node(syms.raise_stmt, + [Name(u"raise"), Call(exc, args)], + prefix=node.prefix) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_remove_old__future__imports.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_remove_old__future__imports.py new file mode 100644 index 000000000..9336f75f3 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_remove_old__future__imports.py @@ -0,0 +1,26 @@ +""" +Fixer for removing any of these lines: + + from __future__ import with_statement + from __future__ import nested_scopes + from __future__ import generators + +The reason is that __future__ imports like these are required to be the first +line of code (after docstrings) on Python 2.6+, which can get in the way. + +These imports are always enabled in Python 2.6+, which is the minimum sane +version to target for Py2/3 compatibility. 
+""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import remove_future_import + +class FixRemoveOldFutureImports(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + run_order = 1 + + def transform(self, node, results): + remove_future_import(u"with_statement", node) + remove_future_import(u"nested_scopes", node) + remove_future_import(u"generators", node) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_unicode_keep_u.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_unicode_keep_u.py new file mode 100644 index 000000000..2e9a4e476 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_unicode_keep_u.py @@ -0,0 +1,24 @@ +"""Fixer that changes unicode to str and unichr to chr, but -- unlike the +lib2to3 fix_unicode.py fixer, does not change u"..." into "...". + +The reason is that Py3.3+ supports the u"..." string prefix, and, if +present, the prefix may provide useful information for disambiguating +between byte strings and unicode strings, which is often the hardest part +of the porting task. 
+ +""" + +from lib2to3.pgen2 import token +from lib2to3 import fixer_base + +_mapping = {u"unichr" : u"chr", u"unicode" : u"str"} + +class FixUnicodeKeepU(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "'unicode' | 'unichr'" + + def transform(self, node, results): + if node.type == token.NAME: + new = node.clone() + new.value = _mapping[node.value] + return new diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_unicode_literals_import.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_unicode_literals_import.py new file mode 100644 index 000000000..51c50620b --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_unicode_literals_import.py @@ -0,0 +1,18 @@ +""" +Adds this import: + + from __future__ import unicode_literals + +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixUnicodeLiteralsImport(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + + run_order = 9 + + def transform(self, node, results): + future_import(u"unicode_literals", node) diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_xrange_with_import.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_xrange_with_import.py new file mode 100644 index 000000000..c910f8165 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/fixes/fix_xrange_with_import.py @@ -0,0 +1,20 @@ +""" +For the ``future`` package. + +Turns any xrange calls into range calls and adds this import line: + + from builtins import range + +at the top. 
+""" + +from lib2to3.fixes.fix_xrange import FixXrange + +from libfuturize.fixer_util import touch_import_top + + +class FixXrangeWithImport(FixXrange): + def transform(self, node, results): + result = super(FixXrangeWithImport, self).transform(node, results) + touch_import_top('builtins', 'range', node) + return result diff --git a/.install/.kodi/addons/script.module.future/libs/libfuturize/main.py b/.install/.kodi/addons/script.module.future/libs/libfuturize/main.py new file mode 100644 index 000000000..634c2f25e --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libfuturize/main.py @@ -0,0 +1,322 @@ +""" +futurize: automatic conversion to clean 2/3 code using ``python-future`` +====================================================================== + +Like Armin Ronacher's modernize.py, ``futurize`` attempts to produce clean +standard Python 3 code that runs on both Py2 and Py3. + +One pass +-------- + +Use it like this on Python 2 code: + + $ futurize --verbose mypython2script.py + +This will attempt to port the code to standard Py3 code that also +provides Py2 compatibility with the help of the right imports from +``future``. + +To write changes to the files, use the -w flag. + +Two stages +---------- + +The ``futurize`` script can also be called in two separate stages. First: + + $ futurize --stage1 mypython2script.py + +This produces more modern Python 2 code that is not yet compatible with Python +3. The tests should still run and the diff should be uncontroversial to apply to +most Python projects that are willing to drop support for Python 2.5 and lower. + +After this, the recommended approach is to explicitly mark all strings that must +be byte-strings with a b'' prefix and all text (unicode) strings with a u'' +prefix, and then invoke the second stage of Python 2 to 2/3 conversion with:: + + $ futurize --stage2 mypython2script.py + +Stage 2 adds a dependency on ``future``. 
It converts most remaining Python +2-specific code to Python 3 code and adds appropriate imports from ``future`` +to restore Py2 support. + +The command above leaves all unadorned string literals as native strings +(byte-strings on Py2, unicode strings on Py3). If instead you would like all +unadorned string literals to be promoted to unicode, you can also pass this +flag: + + $ futurize --stage2 --unicode-literals mypython2script.py + +This adds the declaration ``from __future__ import unicode_literals`` to the +top of each file, which implicitly declares all unadorned string literals to be +unicode strings (``unicode`` on Py2). + +All imports +----------- + +The --all-imports option forces adding all ``__future__`` imports, +``builtins`` imports, and standard library aliases, even if they don't +seem necessary for the current state of each module. (This can simplify +testing, and can reduce the need to think about Py2 compatibility when editing +the code further.) + +""" + +from __future__ import (absolute_import, print_function, unicode_literals) +import future.utils +from future import __version__ + +import sys +import logging +import optparse +import os + +from lib2to3.main import warn, StdoutRefactoringTool +from lib2to3 import refactor + +from libfuturize.fixes import (lib2to3_fix_names_stage1, + lib2to3_fix_names_stage2, + libfuturize_fix_names_stage1, + libfuturize_fix_names_stage2) + +fixer_pkg = 'libfuturize.fixes' + + +def main(args=None): + """Main program. + + Args: + fixer_pkg: the name of a package where the fixers are located. + args: optional; a list of command line arguments. If omitted, + sys.argv[1:] is used. + + Returns a suggested exit status (0, 1, 2). 
+ """ + + # Set up option parser + parser = optparse.OptionParser(usage="futurize [options] file|dir ...") + parser.add_option("-V", "--version", action="store_true", + help="Report the version number of futurize") + parser.add_option("-a", "--all-imports", action="store_true", + help="Add all __future__ and future imports to each module") + parser.add_option("-1", "--stage1", action="store_true", + help="Modernize Python 2 code only; no compatibility with Python 3 (or dependency on ``future``)") + parser.add_option("-2", "--stage2", action="store_true", + help="Take modernized (stage1) code and add a dependency on ``future`` to provide Py3 compatibility.") + parser.add_option("-0", "--both-stages", action="store_true", + help="Apply both stages 1 and 2") + parser.add_option("-u", "--unicode-literals", action="store_true", + help="Add ``from __future__ import unicode_literals`` to implicitly convert all unadorned string literals '' into unicode strings") + parser.add_option("-f", "--fix", action="append", default=[], + help="Each FIX specifies a transformation; default: all.\nEither use '-f division -f metaclass' etc. 
or use the fully-qualified module name: '-f lib2to3.fixes.fix_types -f libfuturize.fixes.fix_unicode_keep_u'") + parser.add_option("-j", "--processes", action="store", default=1, + type="int", help="Run 2to3 concurrently") + parser.add_option("-x", "--nofix", action="append", default=[], + help="Prevent a fixer from being run.") + parser.add_option("-l", "--list-fixes", action="store_true", + help="List available transformations") + parser.add_option("-p", "--print-function", action="store_true", + help="Modify the grammar so that print() is a function") + parser.add_option("-v", "--verbose", action="store_true", + help="More verbose logging") + parser.add_option("--no-diffs", action="store_true", + help="Don't show diffs of the refactoring") + parser.add_option("-w", "--write", action="store_true", + help="Write back modified files") + parser.add_option("-n", "--nobackups", action="store_true", default=False, + help="Don't write backups for modified files.") + parser.add_option("-o", "--output-dir", action="store", type="str", + default="", help="Put output files in this directory " + "instead of overwriting the input files. Requires -n. " + "For Python >= 2.7 only.") + parser.add_option("-W", "--write-unchanged-files", action="store_true", + help="Also write files even if no changes were required" + " (useful with --output-dir); implies -w.") + parser.add_option("--add-suffix", action="store", type="str", default="", + help="Append this string to all output filenames." + " Requires -n if non-empty. For Python >= 2.7 only." + "ex: --add-suffix='3' will generate .py3 files.") + + # Parse command line arguments + flags = {} + refactor_stdin = False + options, args = parser.parse_args(args) + + if options.write_unchanged_files: + flags["write_unchanged_files"] = True + if not options.write: + warn("--write-unchanged-files/-W implies -w.") + options.write = True + # If we allowed these, the original files would be renamed to backup names + # but not replaced. 
+ if options.output_dir and not options.nobackups: + parser.error("Can't use --output-dir/-o without -n.") + if options.add_suffix and not options.nobackups: + parser.error("Can't use --add-suffix without -n.") + + if not options.write and options.no_diffs: + warn("not writing files and not printing diffs; that's not very useful") + if not options.write and options.nobackups: + parser.error("Can't use -n without -w") + if "-" in args: + refactor_stdin = True + if options.write: + print("Can't write to stdin.", file=sys.stderr) + return 2 + # Is this ever necessary? + if options.print_function: + flags["print_function"] = True + + # Set up logging handler + level = logging.DEBUG if options.verbose else logging.INFO + logging.basicConfig(format='%(name)s: %(message)s', level=level) + logger = logging.getLogger('libfuturize.main') + + if options.stage1 or options.stage2: + assert options.both_stages is None + options.both_stages = False + else: + options.both_stages = True + + avail_fixes = set() + + if options.stage1 or options.both_stages: + avail_fixes.update(lib2to3_fix_names_stage1) + avail_fixes.update(libfuturize_fix_names_stage1) + if options.stage2 or options.both_stages: + avail_fixes.update(lib2to3_fix_names_stage2) + avail_fixes.update(libfuturize_fix_names_stage2) + + if options.unicode_literals: + avail_fixes.add('libfuturize.fixes.fix_unicode_literals_import') + + if options.version: + print(__version__) + return 0 + if options.list_fixes: + print("Available transformations for the -f/--fix option:") + # for fixname in sorted(refactor.get_all_fix_names(fixer_pkg)): + for fixname in sorted(avail_fixes): + print(fixname) + if not args: + return 0 + if not args: + print("At least one file or directory argument required.", + file=sys.stderr) + print("Use --help to show usage.", file=sys.stderr) + return 2 + + unwanted_fixes = set() + for fix in options.nofix: + if ".fix_" in fix: + unwanted_fixes.add(fix) + else: + # Infer the full module name for the 
fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + unwanted_fixes.add(found[0]) + + extra_fixes = set() + if options.all_imports: + if options.stage1: + prefix = 'libfuturize.fixes.' + extra_fixes.add(prefix + + 'fix_add__future__imports_except_unicode_literals') + else: + # In case the user hasn't run stage1 for some reason: + prefix = 'libpasteurize.fixes.' + extra_fixes.add(prefix + 'fix_add_all__future__imports') + extra_fixes.add(prefix + 'fix_add_future_standard_library_import') + extra_fixes.add(prefix + 'fix_add_all_future_builtins') + explicit = set() + if options.fix: + all_present = False + for fix in options.fix: + if fix == 'all': + all_present = True + else: + if ".fix_" in fix: + explicit.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. 
Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + explicit.add(found[0]) + if len(explicit & unwanted_fixes) > 0: + print("Conflicting usage: the following fixers have been " + "simultaneously requested and disallowed:\n" + + "\n".join(" " + myf for myf in (explicit & unwanted_fixes)), + file=sys.stderr) + return 2 + requested = avail_fixes.union(explicit) if all_present else explicit + else: + requested = avail_fixes.union(explicit) + fixer_names = (requested | extra_fixes) - unwanted_fixes + + input_base_dir = os.path.commonprefix(args) + if (input_base_dir and not input_base_dir.endswith(os.sep) + and not os.path.isdir(input_base_dir)): + # One or more similar names were passed, their directory is the base. + # os.path.commonprefix() is ignorant of path elements, this corrects + # for that weird API. + input_base_dir = os.path.dirname(input_base_dir) + if options.output_dir: + input_base_dir = input_base_dir.rstrip(os.sep) + logger.info('Output in %r will mirror the input directory %r layout.', + options.output_dir, input_base_dir) + + # Initialize the refactoring tool + if future.utils.PY26: + extra_kwargs = {} + else: + extra_kwargs = { + 'append_suffix': options.add_suffix, + 'output_dir': options.output_dir, + 'input_base_dir': input_base_dir, + } + + rt = StdoutRefactoringTool( + sorted(fixer_names), flags, sorted(explicit), + options.nobackups, not options.no_diffs, + **extra_kwargs) + + # Refactor all files and directories passed as arguments + if not rt.errors: + if refactor_stdin: + rt.refactor_stdin() + else: + try: + rt.refactor(args, options.write, None, + options.processes) + except refactor.MultiprocessingUnsupported: + assert options.processes > 1 + print("Sorry, -j isn't " \ + "supported on this platform.", file=sys.stderr) + return 1 + rt.summarize() + + # Return error status (0 if rt.errors is zero) + return int(bool(rt.errors)) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/__init__.py 
b/.install/.kodi/addons/script.module.future/libs/libpasteurize/__init__.py new file mode 100644 index 000000000..4cb1cbcd6 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/__init__.py @@ -0,0 +1 @@ +# empty to make this a package diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/__init__.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/__init__.py new file mode 100644 index 000000000..905aec47e --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/__init__.py @@ -0,0 +1,54 @@ +import sys +from lib2to3 import refactor + +# The original set of these fixes comes from lib3to2 (https://bitbucket.org/amentajo/lib3to2): +fix_names = set([ + 'libpasteurize.fixes.fix_add_all__future__imports', # from __future__ import absolute_import etc. on separate lines + 'libpasteurize.fixes.fix_add_future_standard_library_import', # we force adding this import for now, even if it doesn't seem necessary to the fix_future_standard_library fixer, for ease of testing + # 'libfuturize.fixes.fix_order___future__imports', # consolidates to a single line to simplify testing -- UNFINISHED + 'libpasteurize.fixes.fix_future_builtins', # adds "from future.builtins import *" + 'libfuturize.fixes.fix_future_standard_library', # adds "from future import standard_library" + + 'libpasteurize.fixes.fix_annotations', + # 'libpasteurize.fixes.fix_bitlength', # ints have this in Py2.7 + # 'libpasteurize.fixes.fix_bool', # need a decorator or Mixin + # 'libpasteurize.fixes.fix_bytes', # leave bytes as bytes + # 'libpasteurize.fixes.fix_classdecorator', # available in + # Py2.6+ + # 'libpasteurize.fixes.fix_collections', hmmm ... + # 'libpasteurize.fixes.fix_dctsetcomp', # avail in Py27 + 'libpasteurize.fixes.fix_division', # yes + # 'libpasteurize.fixes.fix_except', # avail in Py2.6+ + # 'libpasteurize.fixes.fix_features', # ? 
+ 'libpasteurize.fixes.fix_fullargspec', + # 'libpasteurize.fixes.fix_funcattrs', + 'libpasteurize.fixes.fix_getcwd', + 'libpasteurize.fixes.fix_imports', # adds "from future import standard_library" + 'libpasteurize.fixes.fix_imports2', + # 'libpasteurize.fixes.fix_input', + # 'libpasteurize.fixes.fix_int', + # 'libpasteurize.fixes.fix_intern', + # 'libpasteurize.fixes.fix_itertools', + 'libpasteurize.fixes.fix_kwargs', # yes, we want this + # 'libpasteurize.fixes.fix_memoryview', + # 'libpasteurize.fixes.fix_metaclass', # write a custom handler for + # this + # 'libpasteurize.fixes.fix_methodattrs', # __func__ and __self__ seem to be defined on Py2.7 already + 'libpasteurize.fixes.fix_newstyle', # yes, we want this: explicit inheritance from object. Without new-style classes in Py2, super() will break etc. + # 'libpasteurize.fixes.fix_next', # use a decorator for this + # 'libpasteurize.fixes.fix_numliterals', # prob not + # 'libpasteurize.fixes.fix_open', # huh? + # 'libpasteurize.fixes.fix_print', # no way + 'libpasteurize.fixes.fix_printfunction', # adds __future__ import print_function + # 'libpasteurize.fixes.fix_raise_', # TODO: get this working! 
+ + # 'libpasteurize.fixes.fix_range', # nope + # 'libpasteurize.fixes.fix_reduce', + # 'libpasteurize.fixes.fix_setliteral', + # 'libpasteurize.fixes.fix_str', + # 'libpasteurize.fixes.fix_super', # maybe, if our magic super() isn't robust enough + 'libpasteurize.fixes.fix_throw', # yes, if Py3 supports it + # 'libpasteurize.fixes.fix_unittest', + 'libpasteurize.fixes.fix_unpacking', # yes, this is useful + # 'libpasteurize.fixes.fix_with' # way out of date + ]) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/feature_base.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/feature_base.py new file mode 100644 index 000000000..c36d9a951 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/feature_base.py @@ -0,0 +1,57 @@ +u""" +Base classes for features that are backwards-incompatible. + +Usage: +features = Features() +features.add(Feature("py3k_feature", "power< 'py3k' any* >", "2.7")) +PATTERN = features.PATTERN +""" + +pattern_unformatted = u"%s=%s" # name=pattern, for dict lookups +message_unformatted = u""" +%s is only supported in Python %s and above.""" + +class Feature(object): + u""" + A feature has a name, a pattern, and a minimum version of Python 2.x + required to use the feature (or 3.x if there is no backwards-compatible + version of 2.x) + """ + def __init__(self, name, PATTERN, version): + self.name = name + self._pattern = PATTERN + self.version = version + + def message_text(self): + u""" + Format the above text with the name and minimum version required. + """ + return message_unformatted % (self.name, self.version) + +class Features(set): + u""" + A set of features that generates a pattern for the features it contains. + This set will act like a mapping in that we map names to patterns. + """ + mapping = {} + + def update_mapping(self): + u""" + Called every time we care about the mapping of names to features. 
+ """ + self.mapping = dict([(f.name, f) for f in iter(self)]) + + @property + def PATTERN(self): + u""" + Uses the mapping of names to features to return a PATTERN suitable + for using the lib2to3 patcomp. + """ + self.update_mapping() + return u" |\n".join([pattern_unformatted % (f.name, f._pattern) for f in iter(self)]) + + def __getitem__(self, key): + u""" + Implement a simple mapping to get patterns from names. + """ + return self.mapping[key] diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_all__future__imports.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_all__future__imports.py new file mode 100644 index 000000000..378979461 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_all__future__imports.py @@ -0,0 +1,24 @@ +""" +Fixer for adding: + + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + from __future__ import unicode_literals + +This is done when converting from Py3 to both Py3/Py2. +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixAddAllFutureImports(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + run_order = 1 + + def transform(self, node, results): + future_import(u"unicode_literals", node) + future_import(u"print_function", node) + future_import(u"division", node) + future_import(u"absolute_import", node) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_all_future_builtins.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_all_future_builtins.py new file mode 100644 index 000000000..22911bada --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_all_future_builtins.py @@ -0,0 +1,37 @@ +""" +For the ``future`` package. 
+ +Adds this import line:: + + from builtins import (ascii, bytes, chr, dict, filter, hex, input, + int, list, map, next, object, oct, open, pow, + range, round, str, super, zip) + +to a module, irrespective of whether each definition is used. + +Adds these imports after any other imports (in an initial block of them). +""" + +from __future__ import unicode_literals + +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +class FixAddAllFutureBuiltins(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + run_order = 1 + + def transform(self, node, results): + # import_str = """(ascii, bytes, chr, dict, filter, hex, input, + # int, list, map, next, object, oct, open, pow, + # range, round, str, super, zip)""" + touch_import_top(u'builtins', '*', node) + + # builtins = """ascii bytes chr dict filter hex input + # int list map next object oct open pow + # range round str super zip""" + # for builtin in sorted(builtins.split(), reverse=True): + # touch_import_top(u'builtins', builtin, node) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_future_standard_library_import.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_future_standard_library_import.py new file mode 100644 index 000000000..0778406a8 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_add_future_standard_library_import.py @@ -0,0 +1,23 @@ +""" +For the ``future`` package. + +Adds this import line: + + from future import standard_library + +after any __future__ imports but before any other imports. Doesn't actually +change the imports to Py3 style. 
+""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import touch_import_top + +class FixAddFutureStandardLibraryImport(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + run_order = 8 + + def transform(self, node, results): + # TODO: add a blank line between any __future__ imports and this? + touch_import_top(u'future', u'standard_library', node) + # TODO: also add standard_library.install_hooks() diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_annotations.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_annotations.py new file mode 100644 index 000000000..884b67411 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_annotations.py @@ -0,0 +1,48 @@ +u""" +Fixer to remove function annotations +""" + +from lib2to3 import fixer_base +from lib2to3.pgen2 import token +from lib2to3.fixer_util import syms + +warning_text = u"Removing function annotations completely." + +def param_without_annotations(node): + return node.children[0] + +class FixAnnotations(fixer_base.BaseFix): + + warned = False + + def warn_once(self, node, reason): + if not self.warned: + self.warned = True + self.warning(node, reason=reason) + + PATTERN = u""" + funcdef< 'def' any parameters< '(' [params=any] ')' > ['->' ret=any] ':' any* > + """ + + def transform(self, node, results): + u""" + This just strips annotations from the funcdef completely. 
+ """ + params = results.get(u"params") + ret = results.get(u"ret") + if ret is not None: + assert ret.prev_sibling.type == token.RARROW, u"Invalid return annotation" + self.warn_once(node, reason=warning_text) + ret.prev_sibling.remove() + ret.remove() + if params is None: return + if params.type == syms.typedargslist: + # more than one param in a typedargslist + for param in params.children: + if param.type == syms.tname: + self.warn_once(node, reason=warning_text) + param.replace(param_without_annotations(param)) + elif params.type == syms.tname: + # one param + self.warn_once(node, reason=warning_text) + params.replace(param_without_annotations(params)) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_division.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_division.py new file mode 100644 index 000000000..6a048710f --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_division.py @@ -0,0 +1,28 @@ +u""" +Fixer for division: from __future__ import division if needed +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import token, future_import + +def match_division(node): + u""" + __future__.division redefines the meaning of a single slash for division, + so we match that and only that. + """ + slash = token.SLASH + return node.type == slash and not node.next_sibling.type == slash and \ + not node.prev_sibling.type == slash + +class FixDivision(fixer_base.BaseFix): + run_order = 4 # this seems to be ignored? + + def match(self, node): + u""" + Since the tree needs to be fixed once and only once if and only if it + matches, then we can start discarding matches after we make the first. 
+ """ + return match_division(node) + + def transform(self, node, results): + future_import(u"division", node) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_features.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_features.py new file mode 100644 index 000000000..52630f982 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_features.py @@ -0,0 +1,86 @@ +u""" +Warn about features that are not present in Python 2.5, giving a message that +points to the earliest version of Python 2.x (or 3.x, if none) that supports it +""" + +from .feature_base import Feature, Features +from lib2to3 import fixer_base + +FEATURES = [ + #(FeatureName, + # FeaturePattern, + # FeatureMinVersion, + #), + (u"memoryview", + u"power < 'memoryview' trailer < '(' any* ')' > any* >", + u"2.7", + ), + (u"numbers", + u"""import_from< 'from' 'numbers' 'import' any* > | + import_name< 'import' ('numbers' dotted_as_names< any* 'numbers' any* >) >""", + u"2.6", + ), + (u"abc", + u"""import_name< 'import' ('abc' dotted_as_names< any* 'abc' any* >) > | + import_from< 'from' 'abc' 'import' any* >""", + u"2.6", + ), + (u"io", + u"""import_name< 'import' ('io' dotted_as_names< any* 'io' any* >) > | + import_from< 'from' 'io' 'import' any* >""", + u"2.6", + ), + (u"bin", + u"power< 'bin' trailer< '(' any* ')' > any* >", + u"2.6", + ), + (u"formatting", + u"power< any trailer< '.' 'format' > trailer< '(' any* ')' > >", + u"2.6", + ), + (u"nonlocal", + u"global_stmt< 'nonlocal' any* >", + u"3.0", + ), + (u"with_traceback", + u"trailer< '.' 'with_traceback' >", + u"3.0", + ), +] + +class FixFeatures(fixer_base.BaseFix): + + run_order = 9 # Wait until all other fixers have run to check for these + + # To avoid spamming, we only want to warn for each feature once. 
+ features_warned = set() + + # Build features from the list above + features = Features([Feature(name, pattern, version) for \ + name, pattern, version in FEATURES]) + + PATTERN = features.PATTERN + + def match(self, node): + to_ret = super(FixFeatures, self).match(node) + # We want the mapping only to tell us the node's specific information. + try: + del to_ret[u'node'] + except Exception: + # We want it to delete the 'node' from the results + # if it's there, so we don't care if it fails for normal reasons. + pass + return to_ret + + def transform(self, node, results): + for feature_name in results: + if feature_name in self.features_warned: + continue + else: + curr_feature = self.features[feature_name] + if curr_feature.version >= u"3": + fail = self.cannot_convert + else: + fail = self.warning + fail(node, reason=curr_feature.message_text()) + self.features_warned.add(feature_name) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_fullargspec.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_fullargspec.py new file mode 100644 index 000000000..4bd37e151 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_fullargspec.py @@ -0,0 +1,16 @@ +u""" +Fixer for getfullargspec -> getargspec +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name + +warn_msg = u"some of the values returned by getfullargspec are not valid in Python 2 and have no equivalent." 
+ +class FixFullargspec(fixer_base.BaseFix): + + PATTERN = u"'getfullargspec'" + + def transform(self, node, results): + self.warning(node, warn_msg) + return Name(u"getargspec", prefix=node.prefix) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_future_builtins.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_future_builtins.py new file mode 100644 index 000000000..684967998 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_future_builtins.py @@ -0,0 +1,46 @@ +""" +Adds this import line: + + from builtins import XYZ + +for each of the functions XYZ that is used in the module. +""" + +from __future__ import unicode_literals + +from lib2to3 import fixer_base +from lib2to3.pygram import python_symbols as syms +from lib2to3.fixer_util import Name, Call, in_special_context + +from libfuturize.fixer_util import touch_import_top + +# All builtins are: +# from future.builtins.iterators import (filter, map, zip) +# from future.builtins.misc import (ascii, chr, hex, input, isinstance, oct, open, round, super) +# from future.types import (bytes, dict, int, range, str) +# We don't need isinstance any more. + +replaced_builtins = '''filter map zip + ascii chr hex input next oct open round super + bytes dict int range str'''.split() + +expression = '|'.join(["name='{0}'".format(name) for name in replaced_builtins]) + + +class FixFutureBuiltins(fixer_base.BaseFix): + BM_compatible = True + run_order = 9 + + # Currently we only match uses as a function. This doesn't match e.g.: + # if isinstance(s, str): + # ... 
+ PATTERN = """ + power< + ({0}) trailer< '(' args=[any] ')' > + rest=any* > + """.format(expression) + + def transform(self, node, results): + name = results["name"] + touch_import_top(u'builtins', name.value, node) + # name.replace(Name(u"input", prefix=name.prefix)) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_getcwd.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_getcwd.py new file mode 100644 index 000000000..9b7f002b3 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_getcwd.py @@ -0,0 +1,26 @@ +u""" +Fixer for os.getcwd() -> os.getcwdu(). +Also warns about "from os import getcwd", suggesting the above form. +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name + +class FixGetcwd(fixer_base.BaseFix): + + PATTERN = u""" + power< 'os' trailer< dot='.' name='getcwd' > any* > + | + import_from< 'from' 'os' 'import' bad='getcwd' > + """ + + def transform(self, node, results): + if u"name" in results: + name = results[u"name"] + name.replace(Name(u"getcwdu", prefix=name.prefix)) + elif u"bad" in results: + # Can't convert to getcwdu and then expect to catch every use. 
+ self.cannot_convert(node, u"import os, use os.getcwd() instead.") + return + else: + raise ValueError(u"For some reason, the pattern matcher failed.") diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_imports.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_imports.py new file mode 100644 index 000000000..2d6718f16 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_imports.py @@ -0,0 +1,112 @@ +u""" +Fixer for standard library imports renamed in Python 3 +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, is_probably_builtin, Newline, does_tree_import +from lib2to3.pygram import python_symbols as syms +from lib2to3.pgen2 import token +from lib2to3.pytree import Node, Leaf + +from libfuturize.fixer_util import touch_import_top +# from ..fixer_util import NameImport + +# used in simple_mapping_to_pattern() +MAPPING = {u"reprlib": u"repr", + u"winreg": u"_winreg", + u"configparser": u"ConfigParser", + u"copyreg": u"copy_reg", + u"queue": u"Queue", + u"socketserver": u"SocketServer", + u"_markupbase": u"markupbase", + u"test.support": u"test.test_support", + u"dbm.bsd": u"dbhash", + u"dbm.ndbm": u"dbm", + u"dbm.dumb": u"dumbdbm", + u"dbm.gnu": u"gdbm", + u"html.parser": u"HTMLParser", + u"html.entities": u"htmlentitydefs", + u"http.client": u"httplib", + u"http.cookies": u"Cookie", + u"http.cookiejar": u"cookielib", +# "tkinter": "Tkinter", + u"tkinter.dialog": u"Dialog", + u"tkinter._fix": u"FixTk", + u"tkinter.scrolledtext": u"ScrolledText", + u"tkinter.tix": u"Tix", + u"tkinter.constants": u"Tkconstants", + u"tkinter.dnd": u"Tkdnd", + u"tkinter.__init__": u"Tkinter", + u"tkinter.colorchooser": u"tkColorChooser", + u"tkinter.commondialog": u"tkCommonDialog", + u"tkinter.font": u"tkFont", + u"tkinter.ttk": u"ttk", + u"tkinter.messagebox": u"tkMessageBox", + u"tkinter.turtle": u"turtle", + u"urllib.robotparser": u"robotparser", + 
u"xmlrpc.client": u"xmlrpclib", + u"builtins": u"__builtin__", +} + +# generic strings to help build patterns +# these variables mean (with http.client.HTTPConnection as an example): +# name = http +# attr = client +# used = HTTPConnection +# fmt_name is a formatted subpattern (simple_name_match or dotted_name_match) + +# helps match 'queue', as in 'from queue import ...' +simple_name_match = u"name='%s'" +# helps match 'client', to be used if client has been imported from http +subname_match = u"attr='%s'" +# helps match 'http.client', as in 'import urllib.request' +dotted_name_match = u"dotted_name=dotted_name< %s '.' %s >" +# helps match 'queue', as in 'queue.Queue(...)' +power_onename_match = u"%s" +# helps match 'http.client', as in 'http.client.HTTPConnection(...)' +power_twoname_match = u"power< %s trailer< '.' %s > any* >" +# helps match 'client.HTTPConnection', if 'client' has been imported from http +power_subname_match = u"power< %s any* >" +# helps match 'from http.client import HTTPConnection' +from_import_match = u"from_import=import_from< 'from' %s 'import' imported=any >" +# helps match 'from http import client' +from_import_submod_match = u"from_import_submod=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* > ) >" +# helps match 'import urllib.request' +name_import_match = u"name_import=import_name< 'import' %s > | name_import=import_name< 'import' dotted_as_name< %s 'as' renamed=any > >" +# helps match 'import http.client, winreg' +multiple_name_import_match = u"name_import=import_name< 'import' dotted_as_names< names=any* > >" + +def all_patterns(name): + u""" + Accepts a string and returns a pattern of possible patterns involving that name + Called by simple_mapping_to_pattern for each name in the mapping it receives. + """ + + # i_ denotes an import-like node + # u_ denotes a node that appears to be a usage of the name + if u'.' 
in name: + name, attr = name.split(u'.', 1) + simple_name = simple_name_match % (name) + simple_attr = subname_match % (attr) + dotted_name = dotted_name_match % (simple_name, simple_attr) + i_from = from_import_match % (dotted_name) + i_from_submod = from_import_submod_match % (simple_name, simple_attr, simple_attr, simple_attr, simple_attr) + i_name = name_import_match % (dotted_name, dotted_name) + u_name = power_twoname_match % (simple_name, simple_attr) + u_subname = power_subname_match % (simple_attr) + return u' | \n'.join((i_name, i_from, i_from_submod, u_name, u_subname)) + else: + simple_name = simple_name_match % (name) + i_name = name_import_match % (simple_name, simple_name) + i_from = from_import_match % (simple_name) + u_name = power_onename_match % (simple_name) + return u' | \n'.join((i_name, i_from, u_name)) + + +class FixImports(fixer_base.BaseFix): + + PATTERN = u' | \n'.join([all_patterns(name) for name in MAPPING]) + PATTERN = u' | \n'.join((PATTERN, multiple_name_import_match)) + + def transform(self, node, results): + touch_import_top(u'future', u'standard_library', node) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_imports2.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_imports2.py new file mode 100644 index 000000000..70444e9e0 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_imports2.py @@ -0,0 +1,174 @@ +u""" +Fixer for complicated imports +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, String, FromImport, Newline, Comma +from libfuturize.fixer_util import touch_import_top + + +TK_BASE_NAMES = (u'ACTIVE', u'ALL', u'ANCHOR', u'ARC',u'BASELINE', u'BEVEL', u'BOTH', + u'BOTTOM', u'BROWSE', u'BUTT', u'CASCADE', u'CENTER', u'CHAR', + u'CHECKBUTTON', u'CHORD', u'COMMAND', u'CURRENT', u'DISABLED', + u'DOTBOX', u'E', u'END', u'EW', u'EXCEPTION', u'EXTENDED', u'FALSE', + u'FIRST', u'FLAT', u'GROOVE', 
u'HIDDEN', u'HORIZONTAL', u'INSERT', + u'INSIDE', u'LAST', u'LEFT', u'MITER', u'MOVETO', u'MULTIPLE', u'N', + u'NE', u'NO', u'NONE', u'NORMAL', u'NS', u'NSEW', u'NUMERIC', u'NW', + u'OFF', u'ON', u'OUTSIDE', u'PAGES', u'PIESLICE', u'PROJECTING', + u'RADIOBUTTON', u'RAISED', u'READABLE', u'RIDGE', u'RIGHT', + u'ROUND', u'S', u'SCROLL', u'SE', u'SEL', u'SEL_FIRST', u'SEL_LAST', + u'SEPARATOR', u'SINGLE', u'SOLID', u'SUNKEN', u'SW', u'StringTypes', + u'TOP', u'TRUE', u'TclVersion', u'TkVersion', u'UNDERLINE', + u'UNITS', u'VERTICAL', u'W', u'WORD', u'WRITABLE', u'X', u'Y', u'YES', + u'wantobjects') + +PY2MODULES = { + u'urllib2' : ( + u'AbstractBasicAuthHandler', u'AbstractDigestAuthHandler', + u'AbstractHTTPHandler', u'BaseHandler', u'CacheFTPHandler', + u'FTPHandler', u'FileHandler', u'HTTPBasicAuthHandler', + u'HTTPCookieProcessor', u'HTTPDefaultErrorHandler', + u'HTTPDigestAuthHandler', u'HTTPError', u'HTTPErrorProcessor', + u'HTTPHandler', u'HTTPPasswordMgr', + u'HTTPPasswordMgrWithDefaultRealm', u'HTTPRedirectHandler', + u'HTTPSHandler', u'OpenerDirector', u'ProxyBasicAuthHandler', + u'ProxyDigestAuthHandler', u'ProxyHandler', u'Request', + u'StringIO', u'URLError', u'UnknownHandler', u'addinfourl', + u'build_opener', u'install_opener', u'parse_http_list', + u'parse_keqv_list', u'randombytes', u'request_host', u'urlopen'), + u'urllib' : ( + u'ContentTooShortError', u'FancyURLopener',u'URLopener', + u'basejoin', u'ftperrors', u'getproxies', + u'getproxies_environment', u'localhost', u'pathname2url', + u'quote', u'quote_plus', u'splitattr', u'splithost', + u'splitnport', u'splitpasswd', u'splitport', u'splitquery', + u'splittag', u'splittype', u'splituser', u'splitvalue', + u'thishost', u'unquote', u'unquote_plus', u'unwrap', + u'url2pathname', u'urlcleanup', u'urlencode', u'urlopen', + u'urlretrieve',), + u'urlparse' : ( + u'parse_qs', u'parse_qsl', u'urldefrag', u'urljoin', + u'urlparse', u'urlsplit', u'urlunparse', u'urlunsplit'), + u'dbm' : ( + u'ndbm', 
u'gnu', u'dumb'), + u'anydbm' : ( + u'error', u'open'), + u'whichdb' : ( + u'whichdb',), + u'BaseHTTPServer' : ( + u'BaseHTTPRequestHandler', u'HTTPServer'), + u'CGIHTTPServer' : ( + u'CGIHTTPRequestHandler',), + u'SimpleHTTPServer' : ( + u'SimpleHTTPRequestHandler',), + u'FileDialog' : TK_BASE_NAMES + ( + u'FileDialog', u'LoadFileDialog', u'SaveFileDialog', + u'dialogstates', u'test'), + u'tkFileDialog' : ( + u'Directory', u'Open', u'SaveAs', u'_Dialog', u'askdirectory', + u'askopenfile', u'askopenfilename', u'askopenfilenames', + u'askopenfiles', u'asksaveasfile', u'asksaveasfilename'), + u'SimpleDialog' : TK_BASE_NAMES + ( + u'SimpleDialog',), + u'tkSimpleDialog' : TK_BASE_NAMES + ( + u'askfloat', u'askinteger', u'askstring', u'Dialog'), + u'SimpleXMLRPCServer' : ( + u'CGIXMLRPCRequestHandler', u'SimpleXMLRPCDispatcher', + u'SimpleXMLRPCRequestHandler', u'SimpleXMLRPCServer', + u'list_public_methods', u'remove_duplicates', + u'resolve_dotted_attribute'), + u'DocXMLRPCServer' : ( + u'DocCGIXMLRPCRequestHandler', u'DocXMLRPCRequestHandler', + u'DocXMLRPCServer', u'ServerHTMLDoc',u'XMLRPCDocGenerator'), + } + +MAPPING = { u'urllib.request' : + (u'urllib2', u'urllib'), + u'urllib.error' : + (u'urllib2', u'urllib'), + u'urllib.parse' : + (u'urllib2', u'urllib', u'urlparse'), + u'dbm.__init__' : + (u'anydbm', u'whichdb'), + u'http.server' : + (u'CGIHTTPServer', u'SimpleHTTPServer', u'BaseHTTPServer'), + u'tkinter.filedialog' : + (u'tkFileDialog', u'FileDialog'), + u'tkinter.simpledialog' : + (u'tkSimpleDialog', u'SimpleDialog'), + u'xmlrpc.server' : + (u'DocXMLRPCServer', u'SimpleXMLRPCServer'), + } + +# helps match 'http', as in 'from http.server import ...' +simple_name = u"name='%s'" +# helps match 'server', as in 'from http.server import ...' 
+simple_attr = u"attr='%s'" +# helps match 'HTTPServer', as in 'from http.server import HTTPServer' +simple_using = u"using='%s'" +# helps match 'urllib.request', as in 'import urllib.request' +dotted_name = u"dotted_name=dotted_name< %s '.' %s >" +# helps match 'http.server', as in 'http.server.HTTPServer(...)' +power_twoname = u"pow=power< %s trailer< '.' %s > trailer< '.' using=any > any* >" +# helps match 'dbm.whichdb', as in 'dbm.whichdb(...)' +power_onename = u"pow=power< %s trailer< '.' using=any > any* >" +# helps match 'from http.server import HTTPServer' +# also helps match 'from http.server import HTTPServer, SimpleHTTPRequestHandler' +# also helps match 'from http.server import *' +from_import = u"from_import=import_from< 'from' %s 'import' (import_as_name< using=any 'as' renamed=any> | in_list=import_as_names< using=any* > | using='*' | using=NAME) >" +# helps match 'import urllib.request' +name_import = u"name_import=import_name< 'import' (%s | in_list=dotted_as_names< imp_list=any* >) >" + +############# +# WON'T FIX # +############# + +# helps match 'import urllib.request as name' +name_import_rename = u"name_import_rename=dotted_as_name< %s 'as' renamed=any >" +# helps match 'from http import server' +from_import_rename = u"from_import_rename=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | in_list=import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* >) >" + + +def all_modules_subpattern(): + u""" + Builds a pattern for all toplevel names + (urllib, http, etc) + """ + names_dot_attrs = [mod.split(u".") for mod in MAPPING] + ret = u"( " + u" | ".join([dotted_name % (simple_name % (mod[0]), + simple_attr % (mod[1])) for mod in names_dot_attrs]) + ret += u" | " + ret += u" | ".join([simple_name % (mod[0]) for mod in names_dot_attrs if mod[1] == u"__init__"]) + u" )" + return ret + + +def build_import_pattern(mapping1, mapping2): + u""" + mapping1: A dict mapping py3k modules to all possible py2k 
replacements + mapping2: A dict mapping py2k modules to the things they do + This builds a HUGE pattern to match all ways that things can be imported + """ + # py3k: urllib.request, py2k: ('urllib2', 'urllib') + yield from_import % (all_modules_subpattern()) + for py3k, py2k in mapping1.items(): + name, attr = py3k.split(u'.') + s_name = simple_name % (name) + s_attr = simple_attr % (attr) + d_name = dotted_name % (s_name, s_attr) + yield name_import % (d_name) + yield power_twoname % (s_name, s_attr) + if attr == u'__init__': + yield name_import % (s_name) + yield power_onename % (s_name) + yield name_import_rename % (d_name) + yield from_import_rename % (s_name, s_attr, s_attr, s_attr, s_attr) + + +class FixImports2(fixer_base.BaseFix): + + run_order = 4 + + PATTERN = u" | \n".join(build_import_pattern(MAPPING, PY2MODULES)) + + def transform(self, node, results): + touch_import_top(u'future', u'standard_library', node) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_kwargs.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_kwargs.py new file mode 100644 index 000000000..290f991ee --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_kwargs.py @@ -0,0 +1,147 @@ +u""" +Fixer for Python 3 function parameter syntax +This fixer is rather sensitive to incorrect py3k syntax. +""" + +# Note: "relevant" parameters are parameters following the first STAR in the list. 
+ +from lib2to3 import fixer_base +from lib2to3.fixer_util import token, String, Newline, Comma, Name +from libfuturize.fixer_util import indentation, suitify, DoubleStar + +_assign_template = u"%(name)s = %(kwargs)s['%(name)s']; del %(kwargs)s['%(name)s']" +_if_template = u"if '%(name)s' in %(kwargs)s: %(assign)s" +_else_template = u"else: %(name)s = %(default)s" +_kwargs_default_name = u"_3to2kwargs" + +def gen_params(raw_params): + u""" + Generator that yields tuples of (name, default_value) for each parameter in the list + If no default is given, then it is default_value is None (not Leaf(token.NAME, 'None')) + """ + assert raw_params[0].type == token.STAR and len(raw_params) > 2 + curr_idx = 2 # the first place a keyword-only parameter name can be is index 2 + max_idx = len(raw_params) + while curr_idx < max_idx: + curr_item = raw_params[curr_idx] + prev_item = curr_item.prev_sibling + if curr_item.type != token.NAME: + curr_idx += 1 + continue + if prev_item is not None and prev_item.type == token.DOUBLESTAR: + break + name = curr_item.value + nxt = curr_item.next_sibling + if nxt is not None and nxt.type == token.EQUAL: + default_value = nxt.next_sibling + curr_idx += 2 + else: + default_value = None + yield (name, default_value) + curr_idx += 1 + +def remove_params(raw_params, kwargs_default=_kwargs_default_name): + u""" + Removes all keyword-only args from the params list and a bare star, if any. + Does not add the kwargs dict if needed. 
+ Returns True if more action is needed, False if not + (more action is needed if no kwargs dict exists) + """ + assert raw_params[0].type == token.STAR + if raw_params[1].type == token.COMMA: + raw_params[0].remove() + raw_params[1].remove() + kw_params = raw_params[2:] + else: + kw_params = raw_params[3:] + for param in kw_params: + if param.type != token.DOUBLESTAR: + param.remove() + else: + return False + else: + return True + +def needs_fixing(raw_params, kwargs_default=_kwargs_default_name): + u""" + Returns string with the name of the kwargs dict if the params after the first star need fixing + Otherwise returns empty string + """ + found_kwargs = False + needs_fix = False + + for t in raw_params[2:]: + if t.type == token.COMMA: + # Commas are irrelevant at this stage. + continue + elif t.type == token.NAME and not found_kwargs: + # Keyword-only argument: definitely need to fix. + needs_fix = True + elif t.type == token.NAME and found_kwargs: + # Return 'foobar' of **foobar, if needed. + return t.value if needs_fix else u'' + elif t.type == token.DOUBLESTAR: + # Found either '*' from **foobar. + found_kwargs = True + else: + # Never found **foobar. Return a synthetic name, if needed. + return kwargs_default if needs_fix else u'' + +class FixKwargs(fixer_base.BaseFix): + + run_order = 7 # Run after function annotations are removed + + PATTERN = u"funcdef< 'def' NAME parameters< '(' arglist=typedargslist< params=any* > ')' > ':' suite=any >" + + def transform(self, node, results): + params_rawlist = results[u"params"] + for i, item in enumerate(params_rawlist): + if item.type == token.STAR: + params_rawlist = params_rawlist[i:] + break + else: + return + # params is guaranteed to be a list starting with *. + # if fixing is needed, there will be at least 3 items in this list: + # [STAR, COMMA, NAME] is the minimum that we need to worry about. + new_kwargs = needs_fixing(params_rawlist) + # new_kwargs is the name of the kwargs dictionary. 
+ if not new_kwargs: + return + suitify(node) + + # At this point, params_rawlist is guaranteed to be a list + # beginning with a star that includes at least one keyword-only param + # e.g., [STAR, NAME, COMMA, NAME, COMMA, DOUBLESTAR, NAME] or + # [STAR, COMMA, NAME], or [STAR, COMMA, NAME, COMMA, DOUBLESTAR, NAME] + + # Anatomy of a funcdef: ['def', 'name', parameters, ':', suite] + # Anatomy of that suite: [NEWLINE, INDENT, first_stmt, all_other_stmts] + # We need to insert our new stuff before the first_stmt and change the + # first_stmt's prefix. + + suite = node.children[4] + first_stmt = suite.children[2] + ident = indentation(first_stmt) + + for name, default_value in gen_params(params_rawlist): + if default_value is None: + suite.insert_child(2, Newline()) + suite.insert_child(2, String(_assign_template %{u'name':name, u'kwargs':new_kwargs}, prefix=ident)) + else: + suite.insert_child(2, Newline()) + suite.insert_child(2, String(_else_template %{u'name':name, u'default':default_value}, prefix=ident)) + suite.insert_child(2, Newline()) + suite.insert_child(2, String(_if_template %{u'assign':_assign_template %{u'name':name, u'kwargs':new_kwargs}, u'name':name, u'kwargs':new_kwargs}, prefix=ident)) + first_stmt.prefix = ident + suite.children[2].prefix = u"" + + # Now, we need to fix up the list of params. 
+ + must_add_kwargs = remove_params(params_rawlist) + if must_add_kwargs: + arglist = results[u'arglist'] + if len(arglist.children) > 0 and arglist.children[-1].type != token.COMMA: + arglist.append_child(Comma()) + arglist.append_child(DoubleStar(prefix=u" ")) + arglist.append_child(Name(new_kwargs)) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_memoryview.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_memoryview.py new file mode 100644 index 000000000..a20f6f3f2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_memoryview.py @@ -0,0 +1,21 @@ +u""" +Fixer for memoryview(s) -> buffer(s). +Explicit because some memoryview methods are invalid on buffer objects. +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name + + +class FixMemoryview(fixer_base.BaseFix): + + explicit = True # User must specify that they want this. + + PATTERN = u""" + power< name='memoryview' trailer< '(' [any] ')' > + rest=any* > + """ + + def transform(self, node, results): + name = results[u"name"] + name.replace(Name(u"buffer", prefix=name.prefix)) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_metaclass.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_metaclass.py new file mode 100644 index 000000000..52dd1d145 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_metaclass.py @@ -0,0 +1,78 @@ +u""" +Fixer for (metaclass=X) -> __metaclass__ = X +Some semantics (see PEP 3115) may be altered in the translation.""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, syms, Node, Leaf, Newline, find_root +from lib2to3.pygram import token +from libfuturize.fixer_util import indentation, suitify +# from ..fixer_util import Name, syms, Node, Leaf, Newline, find_root, indentation, suitify + +def has_metaclass(parent): + results = None + for node 
in parent.children: + kids = node.children + if node.type == syms.argument: + if kids[0] == Leaf(token.NAME, u"metaclass") and \ + kids[1] == Leaf(token.EQUAL, u"=") and \ + kids[2]: + #Hack to avoid "class X(=):" with this case. + results = [node] + kids + break + elif node.type == syms.arglist: + # Argument list... loop through it looking for: + # Node(*, [*, Leaf(token.NAME, u"metaclass"), Leaf(token.EQUAL, u"="), Leaf(*, *)] + for child in node.children: + if results: break + if child.type == token.COMMA: + #Store the last comma, which precedes the metaclass + comma = child + elif type(child) == Node: + meta = equal = name = None + for arg in child.children: + if arg == Leaf(token.NAME, u"metaclass"): + #We have the (metaclass) part + meta = arg + elif meta and arg == Leaf(token.EQUAL, u"="): + #We have the (metaclass=) part + equal = arg + elif meta and equal: + #Here we go, we have (metaclass=X) + name = arg + results = (comma, meta, equal, name) + break + return results + + +class FixMetaclass(fixer_base.BaseFix): + + PATTERN = u""" + classdef + """ + + def transform(self, node, results): + meta_results = has_metaclass(node) + if not meta_results: return + for meta in meta_results: + meta.remove() + target = Leaf(token.NAME, u"__metaclass__") + equal = Leaf(token.EQUAL, u"=", prefix=u" ") + # meta is the last item in what was returned by has_metaclass(): name + name = meta + name.prefix = u" " + stmt_node = Node(syms.atom, [target, equal, name]) + + suitify(node) + for item in node.children: + if item.type == syms.suite: + for stmt in item.children: + if stmt.type == token.INDENT: + # Insert, in reverse order, the statement, a newline, + # and an indent right after the first indented line + loc = item.children.index(stmt) + 1 + # Keep consistent indentation form + ident = Leaf(token.INDENT, stmt.value) + item.insert_child(loc, ident) + item.insert_child(loc, Newline()) + item.insert_child(loc, stmt_node) + break diff --git 
a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_newstyle.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_newstyle.py new file mode 100644 index 000000000..cc6b3adcb --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_newstyle.py @@ -0,0 +1,33 @@ +u""" +Fixer for "class Foo: ..." -> "class Foo(object): ..." +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import LParen, RParen, Name + +from libfuturize.fixer_util import touch_import_top + + +def insert_object(node, idx): + node.insert_child(idx, RParen()) + node.insert_child(idx, Name(u"object")) + node.insert_child(idx, LParen()) + +class FixNewstyle(fixer_base.BaseFix): + + # Match: + # class Blah: + # and: + # class Blah(): + + PATTERN = u"classdef< 'class' NAME ['(' ')'] colon=':' any >" + + def transform(self, node, results): + colon = results[u"colon"] + idx = node.children.index(colon) + if (node.children[idx-2].value == '(' and + node.children[idx-1].value == ')'): + del node.children[idx-2:idx] + idx -= 2 + insert_object(node, idx) + touch_import_top(u'builtins', 'object', node) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_next.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_next.py new file mode 100644 index 000000000..9ecb6c043 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_next.py @@ -0,0 +1,43 @@ +u""" +Fixer for: +it.__next__() -> it.next(). +next(it) -> it.next(). +""" + +from lib2to3.pgen2 import token +from lib2to3.pygram import python_symbols as syms +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, Call, find_binding, Attr + +bind_warning = u"Calls to builtin next() possibly shadowed by global binding" + + +class FixNext(fixer_base.BaseFix): + + PATTERN = u""" + power< base=any+ trailer< '.' 
attr='__next__' > any* > + | + power< head='next' trailer< '(' arg=any ')' > any* > + | + classdef< 'class' base=any+ ':' + suite< any* + funcdef< 'def' + attr='__next__' + parameters< '(' NAME ')' > any+ > + any* > > + """ + + def transform(self, node, results): + assert results + + base = results.get(u"base") + attr = results.get(u"attr") + head = results.get(u"head") + arg_ = results.get(u"arg") + if arg_: + arg = arg_.clone() + head.replace(Attr(Name(unicode(arg),prefix=head.prefix), + Name(u"next"))) + arg_.remove() + elif base: + attr.replace(Name(u"next", prefix=attr.prefix)) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_printfunction.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_printfunction.py new file mode 100644 index 000000000..a2a6e0843 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_printfunction.py @@ -0,0 +1,17 @@ +u""" +Fixer for print: from __future__ import print_function. +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixPrintfunction(fixer_base.BaseFix): + + # explicit = True + + PATTERN = u""" + power< 'print' trailer < '(' any* ')' > any* > + """ + + def transform(self, node, results): + future_import(u"print_function", node) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_raise.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_raise.py new file mode 100644 index 000000000..9c9c192f8 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_raise.py @@ -0,0 +1,25 @@ +u"""Fixer for 'raise E(V).with_traceback(T)' -> 'raise E, V, T'""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Comma, Node, Leaf, token, syms + +class FixRaise(fixer_base.BaseFix): + + PATTERN = u""" + raise_stmt< 'raise' (power< name=any [trailer< '(' val=any* ')' >] + [trailer< '.' 
'with_traceback' > trailer< '(' trc=any ')' >] > | any) ['from' chain=any] >""" + + def transform(self, node, results): + name, val, trc = (results.get(u"name"), results.get(u"val"), results.get(u"trc")) + chain = results.get(u"chain") + if chain is not None: + self.warning(node, u"explicit exception chaining is not supported in Python 2") + chain.prev_sibling.remove() + chain.remove() + if trc is not None: + val = val[0] if val else Leaf(token.NAME, u"None") + val.prefix = trc.prefix = u" " + kids = [Leaf(token.NAME, u"raise"), name.clone(), Comma(), + val.clone(), Comma(), trc.clone()] + raise_stmt = Node(syms.raise_stmt, kids) + node.replace(raise_stmt) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_raise_.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_raise_.py new file mode 100644 index 000000000..0f020c454 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_raise_.py @@ -0,0 +1,35 @@ +u"""Fixer for + raise E(V).with_traceback(T) + to: + from future.utils import raise_ + ... + raise_(E, V, T) + +TODO: FIXME!! + +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Comma, Node, Leaf, token, syms + +class FixRaise(fixer_base.BaseFix): + + PATTERN = u""" + raise_stmt< 'raise' (power< name=any [trailer< '(' val=any* ')' >] + [trailer< '.' 
'with_traceback' > trailer< '(' trc=any ')' >] > | any) ['from' chain=any] >""" + + def transform(self, node, results): + FIXME + name, val, trc = (results.get(u"name"), results.get(u"val"), results.get(u"trc")) + chain = results.get(u"chain") + if chain is not None: + self.warning(node, u"explicit exception chaining is not supported in Python 2") + chain.prev_sibling.remove() + chain.remove() + if trc is not None: + val = val[0] if val else Leaf(token.NAME, u"None") + val.prefix = trc.prefix = u" " + kids = [Leaf(token.NAME, u"raise"), name.clone(), Comma(), + val.clone(), Comma(), trc.clone()] + raise_stmt = Node(syms.raise_stmt, kids) + node.replace(raise_stmt) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_throw.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_throw.py new file mode 100644 index 000000000..c0feed1ea --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_throw.py @@ -0,0 +1,23 @@ +u"""Fixer for 'g.throw(E(V).with_traceback(T))' -> 'g.throw(E, V, T)'""" + +from lib2to3 import fixer_base +from lib2to3.pytree import Node, Leaf +from lib2to3.pgen2 import token +from lib2to3.fixer_util import Comma + +class FixThrow(fixer_base.BaseFix): + + PATTERN = u""" + power< any trailer< '.' 'throw' > + trailer< '(' args=power< exc=any trailer< '(' val=any* ')' > + trailer< '.' 
'with_traceback' > trailer< '(' trc=any ')' > > ')' > > + """ + + def transform(self, node, results): + syms = self.syms + exc, val, trc = (results[u"exc"], results[u"val"], results[u"trc"]) + val = val[0] if val else Leaf(token.NAME, u"None") + val.prefix = trc.prefix = u" " + kids = [exc.clone(), Comma(), val.clone(), Comma(), trc.clone()] + args = results[u"args"] + args.children = kids diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_unpacking.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_unpacking.py new file mode 100644 index 000000000..c2d3207a2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/fixes/fix_unpacking.py @@ -0,0 +1,120 @@ +u""" +Fixer for: +(a,)* *b (,c)* [,] = s +for (a,)* *b (,c)* [,] in d: ... +""" + +from lib2to3 import fixer_base +from itertools import count +from lib2to3.fixer_util import (Assign, Comma, Call, Newline, Name, + Number, token, syms, Node, Leaf) +from libfuturize.fixer_util import indentation, suitify, commatize +# from libfuturize.fixer_util import Assign, Comma, Call, Newline, Name, Number, indentation, suitify, commatize, token, syms, Node, Leaf + +def assignment_source(num_pre, num_post, LISTNAME, ITERNAME): + u""" + Accepts num_pre and num_post, which are counts of values + before and after the starg (not including the starg) + Returns a source fit for Assign() from fixer_util + """ + children = [] + pre = unicode(num_pre) + post = unicode(num_post) + # This code builds the assignment source from lib2to3 tree primitives. + # It's not very readable, but it seems like the most correct way to do it. 
+ if num_pre > 0: + pre_part = Node(syms.power, [Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Leaf(token.COLON, u":"), Number(pre)]), Leaf(token.RSQB, u"]")])]) + children.append(pre_part) + children.append(Leaf(token.PLUS, u"+", prefix=u" ")) + main_part = Node(syms.power, [Leaf(token.LSQB, u"[", prefix=u" "), Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Number(pre) if num_pre > 0 else Leaf(1, u""), Leaf(token.COLON, u":"), Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]) if num_post > 0 else Leaf(1, u"")]), Leaf(token.RSQB, u"]"), Leaf(token.RSQB, u"]")])]) + children.append(main_part) + if num_post > 0: + children.append(Leaf(token.PLUS, u"+", prefix=u" ")) + post_part = Node(syms.power, [Name(LISTNAME, prefix=u" "), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]), Leaf(token.COLON, u":")]), Leaf(token.RSQB, u"]")])]) + children.append(post_part) + source = Node(syms.arith_expr, children) + return source + +class FixUnpacking(fixer_base.BaseFix): + + PATTERN = u""" + expl=expr_stmt< testlist_star_expr< + pre=(any ',')* + star_expr< '*' name=NAME > + post=(',' any)* [','] > '=' source=any > | + impl=for_stmt< 'for' lst=exprlist< + pre=(any ',')* + star_expr< '*' name=NAME > + post=(',' any)* [','] > 'in' it=any ':' suite=any>""" + + def fix_explicit_context(self, node, results): + pre, name, post, source = (results.get(n) for n in (u"pre", u"name", u"post", u"source")) + pre = [n.clone() for n in pre if n.type == token.NAME] + name.prefix = u" " + post = [n.clone() for n in post if n.type == token.NAME] + target = [n.clone() for n in commatize(pre + [name.clone()] + post)] + # to make the special-case fix for "*z, = ..." 
correct with the least + # amount of modification, make the left-side into a guaranteed tuple + target.append(Comma()) + source.prefix = u"" + setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [source.clone()])) + power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME)) + return setup_line, power_line + + def fix_implicit_context(self, node, results): + u""" + Only example of the implicit context is + a for loop, so only fix that. + """ + pre, name, post, it = (results.get(n) for n in (u"pre", u"name", u"post", u"it")) + pre = [n.clone() for n in pre if n.type == token.NAME] + name.prefix = u" " + post = [n.clone() for n in post if n.type == token.NAME] + target = [n.clone() for n in commatize(pre + [name.clone()] + post)] + # to make the special-case fix for "*z, = ..." correct with the least + # amount of modification, make the left-side into a guaranteed tuple + target.append(Comma()) + source = it.clone() + source.prefix = u"" + setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [Name(self.ITERNAME)])) + power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME)) + return setup_line, power_line + + def transform(self, node, results): + u""" + a,b,c,d,e,f,*g,h,i = range(100) changes to + _3to2list = list(range(100)) + a,b,c,d,e,f,g,h,i, = _3to2list[:6] + [_3to2list[6:-2]] + _3to2list[-2:] + + and + + for a,b,*c,d,e in iter_of_iters: do_stuff changes to + for _3to2iter in iter_of_iters: + _3to2list = list(_3to2iter) + a,b,c,d,e, = _3to2list[:2] + [_3to2list[2:-2]] + _3to2list[-2:] + do_stuff + """ + self.LISTNAME = self.new_name(u"_3to2list") + self.ITERNAME = self.new_name(u"_3to2iter") + expl, impl = results.get(u"expl"), results.get(u"impl") + if expl is not None: + setup_line, power_line = self.fix_explicit_context(node, results) + setup_line.prefix = expl.prefix + power_line.prefix = indentation(expl.parent) + setup_line.append_child(Newline()) + parent = 
node.parent + i = node.remove() + parent.insert_child(i, power_line) + parent.insert_child(i, setup_line) + elif impl is not None: + setup_line, power_line = self.fix_implicit_context(node, results) + suitify(node) + suite = [k for k in node.children if k.type == syms.suite][0] + setup_line.prefix = u"" + power_line.prefix = suite.children[1].value + suite.children[2].prefix = indentation(suite.children[2]) + suite.insert_child(2, Newline()) + suite.insert_child(2, power_line) + suite.insert_child(2, Newline()) + suite.insert_child(2, setup_line) + results.get(u"lst").replace(Name(self.ITERNAME, prefix=u" ")) diff --git a/.install/.kodi/addons/script.module.future/libs/libpasteurize/main.py b/.install/.kodi/addons/script.module.future/libs/libpasteurize/main.py new file mode 100644 index 000000000..4179174b5 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/libpasteurize/main.py @@ -0,0 +1,204 @@ +""" +pasteurize: automatic conversion of Python 3 code to clean 2/3 code +=================================================================== + +``pasteurize`` attempts to convert existing Python 3 code into source-compatible +Python 2 and 3 code. + +Use it like this on Python 3 code: + + $ pasteurize --verbose mypython3script.py + +This removes any Py3-only syntax (e.g. new metaclasses) and adds these +import lines: + + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + from __future__ import unicode_literals + from future import standard_library + standard_library.install_hooks() + from builtins import * + +To write changes to the files, use the -w flag. + +It also adds any other wrappers needed for Py2/3 compatibility. + +Note that separate stages are not available (or needed) when converting from +Python 3 with ``pasteurize`` as they are when converting from Python 2 with +``futurize``. 
+ +The --all-imports option forces adding all ``__future__`` imports, +``builtins`` imports, and standard library aliases, even if they don't +seem necessary for the current state of each module. (This can simplify +testing, and can reduce the need to think about Py2 compatibility when editing +the code further.) + +""" + +from __future__ import (absolute_import, print_function, unicode_literals) + +import sys +import logging +import optparse +from lib2to3.main import main, warn, StdoutRefactoringTool +from lib2to3 import refactor + +from future import __version__ +from libpasteurize.fixes import fix_names + + +def main(args=None): + """Main program. + + Returns a suggested exit status (0, 1, 2). + """ + # Set up option parser + parser = optparse.OptionParser(usage="pasteurize [options] file|dir ...") + parser.add_option("-V", "--version", action="store_true", + help="Report the version number of pasteurize") + parser.add_option("-a", "--all-imports", action="store_true", + help="Adds all __future__ and future imports to each module") + parser.add_option("-f", "--fix", action="append", default=[], + help="Each FIX specifies a transformation; default: all") + parser.add_option("-j", "--processes", action="store", default=1, + type="int", help="Run 2to3 concurrently") + parser.add_option("-x", "--nofix", action="append", default=[], + help="Prevent a fixer from being run.") + parser.add_option("-l", "--list-fixes", action="store_true", + help="List available transformations") + # parser.add_option("-p", "--print-function", action="store_true", + # help="Modify the grammar so that print() is a function") + parser.add_option("-v", "--verbose", action="store_true", + help="More verbose logging") + parser.add_option("--no-diffs", action="store_true", + help="Don't show diffs of the refactoring") + parser.add_option("-w", "--write", action="store_true", + help="Write back modified files") + parser.add_option("-n", "--nobackups", action="store_true", default=False, + 
help="Don't write backups for modified files.") + + # Parse command line arguments + refactor_stdin = False + flags = {} + options, args = parser.parse_args(args) + fixer_pkg = 'libpasteurize.fixes' + avail_fixes = fix_names + flags["print_function"] = True + + if not options.write and options.no_diffs: + warn("not writing files and not printing diffs; that's not very useful") + if not options.write and options.nobackups: + parser.error("Can't use -n without -w") + if options.version: + print(__version__) + return 0 + if options.list_fixes: + print("Available transformations for the -f/--fix option:") + for fixname in sorted(avail_fixes): + print(fixname) + if not args: + return 0 + if not args: + print("At least one file or directory argument required.", + file=sys.stderr) + print("Use --help to show usage.", file=sys.stderr) + return 2 + if "-" in args: + refactor_stdin = True + if options.write: + print("Can't write to stdin.", file=sys.stderr) + return 2 + + # Set up logging handler + level = logging.DEBUG if options.verbose else logging.INFO + logging.basicConfig(format='%(name)s: %(message)s', level=level) + + unwanted_fixes = set() + for fix in options.nofix: + if ".fix_" in fix: + unwanted_fixes.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + unwanted_fixes.add(found[0]) + + extra_fixes = set() + if options.all_imports: + prefix = 'libpasteurize.fixes.' 
+ extra_fixes.add(prefix + 'fix_add_all__future__imports') + extra_fixes.add(prefix + 'fix_add_future_standard_library_import') + extra_fixes.add(prefix + 'fix_add_all_future_builtins') + + explicit = set() + if options.fix: + all_present = False + for fix in options.fix: + if fix == 'all': + all_present = True + else: + if ".fix_" in fix: + explicit.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libpasteurize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + explicit.add(found[0]) + if len(explicit & unwanted_fixes) > 0: + print("Conflicting usage: the following fixers have been " + "simultaneously requested and disallowed:\n" + + "\n".join(" " + myf for myf in (explicit & unwanted_fixes)), + file=sys.stderr) + return 2 + requested = avail_fixes.union(explicit) if all_present else explicit + else: + requested = avail_fixes.union(explicit) + + fixer_names = requested | extra_fixes - unwanted_fixes + + # Initialize the refactoring tool + rt = StdoutRefactoringTool(sorted(fixer_names), flags, set(), + options.nobackups, not options.no_diffs) + + # Refactor all files and directories passed as arguments + if not rt.errors: + if refactor_stdin: + rt.refactor_stdin() + else: + try: + rt.refactor(args, options.write, None, + options.processes) + except refactor.MultiprocessingUnsupported: + assert options.processes > 1 + print("Sorry, -j isn't " \ + "supported on this platform.", file=sys.stderr) + return 1 + rt.summarize() + + # Return error status (0 if rt.errors is zero) + return int(bool(rt.errors)) diff --git 
a/.install/.kodi/addons/script.module.future/libs/past/__init__.py b/.install/.kodi/addons/script.module.future/libs/past/__init__.py new file mode 100644 index 000000000..3b5d9db17 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/past/__init__.py @@ -0,0 +1,92 @@ +# coding=utf-8 +""" +past: compatibility with Python 2 from Python 3 +=============================================== + +``past`` is a package to aid with Python 2/3 compatibility. Whereas ``future`` +contains backports of Python 3 constructs to Python 2, ``past`` provides +implementations of some Python 2 constructs in Python 3 and tools to import and +run Python 2 code in Python 3. It is intended to be used sparingly, as a way of +running old Python 2 code from Python 3 until the code is ported properly. + +Potential uses for libraries: + +- as a step in porting a Python 2 codebase to Python 3 (e.g. with the ``futurize`` script) +- to provide Python 3 support for previously Python 2-only libraries with the + same APIs as on Python 2 -- particularly with regard to 8-bit strings (the + ``past.builtins.str`` type). +- to aid in providing minimal-effort Python 3 support for applications using + libraries that do not yet wish to upgrade their code properly to Python 3, or + wish to upgrade it gradually to Python 3 style. + + +Here are some code examples that run identically on Python 3 and 2:: + + >>> from past.builtins import str as oldstr + + >>> philosopher = oldstr(u'\u5b54\u5b50'.encode('utf-8')) + >>> # This now behaves like a Py2 byte-string on both Py2 and Py3. 
+ >>> # For example, indexing returns a Python 2-like string object, not + >>> # an integer: + >>> philosopher[0] + '\xe5' + >>> type(philosopher[0]) + + + >>> # List-producing versions of range, reduce, map, filter + >>> from past.builtins import range, reduce + >>> range(10) + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + >>> reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) + 15 + + >>> # Other functions removed in Python 3 are resurrected ... + >>> from past.builtins import execfile + >>> execfile('myfile.py') + + >>> from past.builtins import raw_input + >>> name = raw_input('What is your name? ') + What is your name? [cursor] + + >>> from past.builtins import reload + >>> reload(mymodule) # equivalent to imp.reload(mymodule) in Python 3 + + >>> from past.builtins import xrange + >>> for i in xrange(10): + ... pass + + +It also provides import hooks so you can import and use Python 2 modules like +this:: + + $ python3 + + >>> from past import autotranslate + >>> authotranslate('mypy2module') + >>> import mypy2module + +until the authors of the Python 2 modules have upgraded their code. Then, for +example:: + + >>> mypy2module.func_taking_py2_string(oldstr(b'abcd')) + + +Credits +------- + +:Author: Ed Schofield +:Sponsor: Python Charmers Pty Ltd, Australia: http://pythoncharmers.com + + +Licensing +--------- +Copyright 2013-2018 Python Charmers Pty Ltd, Australia. +The software is distributed under an MIT licence. See LICENSE.txt. 
+""" + + +from past.translation import install_hooks as autotranslate +from future import __version__, __copyright__, __license__ + +__title__ = 'past' +__author__ = 'Ed Schofield' diff --git a/.install/.kodi/addons/script.module.future/libs/past/builtins/__init__.py b/.install/.kodi/addons/script.module.future/libs/past/builtins/__init__.py new file mode 100644 index 000000000..1b19e373c --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/past/builtins/__init__.py @@ -0,0 +1,72 @@ +""" +A resurrection of some old functions from Python 2 for use in Python 3. These +should be used sparingly, to help with porting efforts, since code using them +is no longer standard Python 3 code. + +This module provides the following: + +1. Implementations of these builtin functions which have no equivalent on Py3: + +- apply +- chr +- cmp +- execfile + +2. Aliases: + +- intern <- sys.intern +- raw_input <- input +- reduce <- functools.reduce +- reload <- imp.reload +- unichr <- chr +- unicode <- str +- xrange <- range + +3. List-producing versions of the corresponding Python 3 iterator-producing functions: + +- filter +- map +- range +- zip + +4. Forward-ported Py2 types: + +- basestring +- dict +- str +- long +- unicode + +""" + +from future.utils import PY3 +from past.builtins.noniterators import (filter, map, range, reduce, zip) +# from past.builtins.misc import (ascii, hex, input, oct, open) +if PY3: + from past.types import (basestring, + olddict as dict, + oldstr as str, + long, + unicode) +else: + from __builtin__ import (basestring, dict, str, long, unicode) + +from past.builtins.misc import (apply, chr, cmp, execfile, intern, oct, + raw_input, reload, unichr, unicode, xrange) +from past import utils + + +if utils.PY3: + # We only import names that shadow the builtins on Py3. No other namespace + # pollution on Py3. 
+ + # Only shadow builtins on Py3; no new names + __all__ = ['filter', 'map', 'range', 'reduce', 'zip', + 'basestring', 'dict', 'str', 'long', 'unicode', + 'apply', 'chr', 'cmp', 'execfile', 'intern', 'raw_input', + 'reload', 'unichr', 'xrange' + ] + +else: + # No namespace pollution on Py2 + __all__ = [] diff --git a/.install/.kodi/addons/script.module.future/libs/past/builtins/misc.py b/.install/.kodi/addons/script.module.future/libs/past/builtins/misc.py new file mode 100644 index 000000000..06fbb92d2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/past/builtins/misc.py @@ -0,0 +1,89 @@ +from __future__ import unicode_literals +import sys +import inspect +from collections import Mapping + +from future.utils import PY3, exec_ + + +if PY3: + import builtins + + def apply(f, *args, **kw): + return f(*args, **kw) + + from past.builtins import str as oldstr + + def chr(i): + """ + Return a byte-string of one character with ordinal i; 0 <= i <= 256 + """ + return oldstr(bytes((i,))) + + def cmp(x, y): + """ + cmp(x, y) -> integer + + Return negative if xy. + """ + return (x > y) - (x < y) + + from sys import intern + + def oct(number): + """oct(number) -> string + + Return the octal representation of an integer + """ + return '0' + builtins.oct(number)[2:] + + raw_input = input + from imp import reload + unicode = str + unichr = chr + xrange = range +else: + import __builtin__ + apply = __builtin__.apply + chr = __builtin__.chr + cmp = __builtin__.cmp + execfile = __builtin__.execfile + intern = __builtin__.intern + oct = __builtin__.oct + raw_input = __builtin__.raw_input + reload = __builtin__.reload + unicode = __builtin__.unicode + unichr = __builtin__.unichr + xrange = __builtin__.xrange + + +if PY3: + def execfile(filename, myglobals=None, mylocals=None): + """ + Read and execute a Python script from a file in the given namespaces. + The globals and locals are dictionaries, defaulting to the current + globals and locals. 
If only globals is given, locals defaults to it. + """ + if myglobals is None: + # There seems to be no alternative to frame hacking here. + caller_frame = inspect.stack()[1] + myglobals = caller_frame[0].f_globals + mylocals = caller_frame[0].f_locals + elif mylocals is None: + # Only if myglobals is given do we set mylocals to it. + mylocals = myglobals + if not isinstance(myglobals, Mapping): + raise TypeError('globals must be a mapping') + if not isinstance(mylocals, Mapping): + raise TypeError('locals must be a mapping') + with open(filename, "rbU") as fin: + source = fin.read() + code = compile(source, filename, "exec") + exec_(code, myglobals, mylocals) + + +if PY3: + __all__ = ['apply', 'chr', 'cmp', 'execfile', 'intern', 'raw_input', + 'reload', 'unichr', 'unicode', 'xrange'] +else: + __all__ = [] diff --git a/.install/.kodi/addons/script.module.future/libs/past/builtins/noniterators.py b/.install/.kodi/addons/script.module.future/libs/past/builtins/noniterators.py new file mode 100644 index 000000000..5826b97c1 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/past/builtins/noniterators.py @@ -0,0 +1,272 @@ +""" +This module is designed to be used as follows:: + + from past.builtins.noniterators import filter, map, range, reduce, zip + +And then, for example:: + + assert isinstance(range(5), list) + +The list-producing functions this brings in are:: + +- ``filter`` +- ``map`` +- ``range`` +- ``reduce`` +- ``zip`` + +""" + +from __future__ import division, absolute_import, print_function + +from itertools import chain, starmap +import itertools # since zip_longest doesn't exist on Py2 +from past.types import basestring +from past.utils import PY3 + + +def flatmap(f, items): + return chain.from_iterable(map(f, items)) + + +if PY3: + import builtins + + # list-producing versions of the major Python iterating functions + def oldfilter(*args): + """ + filter(function or None, sequence) -> list, tuple, or string + + Return those items of 
sequence for which function(item) is true. + If function is None, return the items that are true. If sequence + is a tuple or string, return the same type, else return a list. + """ + mytype = type(args[1]) + if isinstance(args[1], basestring): + return mytype().join(builtins.filter(*args)) + elif isinstance(args[1], (tuple, list)): + return mytype(builtins.filter(*args)) + else: + # Fall back to list. Is this the right thing to do? + return list(builtins.filter(*args)) + + # This is surprisingly difficult to get right. For example, the + # solutions here fail with the test cases in the docstring below: + # http://stackoverflow.com/questions/8072755/ + def oldmap(func, *iterables): + """ + map(function, sequence[, sequence, ...]) -> list + + Return a list of the results of applying the function to the + items of the argument sequence(s). If more than one sequence is + given, the function is called with an argument list consisting of + the corresponding item of each sequence, substituting None for + missing values when not all sequences have the same length. If + the function is None, return a list of the items of the sequence + (or a list of tuples if more than one sequence). + + Test cases: + >>> oldmap(None, 'hello world') + ['h', 'e', 'l', 'l', 'o', ' ', 'w', 'o', 'r', 'l', 'd'] + + >>> oldmap(None, range(4)) + [0, 1, 2, 3] + + More test cases are in past.tests.test_builtins. 
+ """ + zipped = itertools.zip_longest(*iterables) + l = list(zipped) + if len(l) == 0: + return [] + if func is None: + result = l + else: + result = list(starmap(func, l)) + + # Inspect to see whether it's a simple sequence of tuples + try: + if max([len(item) for item in result]) == 1: + return list(chain.from_iterable(result)) + # return list(flatmap(func, result)) + except TypeError as e: + # Simple objects like ints have no len() + pass + return result + + ############################ + ### For reference, the source code for Py2.7 map function: + # static PyObject * + # builtin_map(PyObject *self, PyObject *args) + # { + # typedef struct { + # PyObject *it; /* the iterator object */ + # int saw_StopIteration; /* bool: did the iterator end? */ + # } sequence; + # + # PyObject *func, *result; + # sequence *seqs = NULL, *sqp; + # Py_ssize_t n, len; + # register int i, j; + # + # n = PyTuple_Size(args); + # if (n < 2) { + # PyErr_SetString(PyExc_TypeError, + # "map() requires at least two args"); + # return NULL; + # } + # + # func = PyTuple_GetItem(args, 0); + # n--; + # + # if (func == Py_None) { + # if (PyErr_WarnPy3k("map(None, ...) not supported in 3.x; " + # "use list(...)", 1) < 0) + # return NULL; + # if (n == 1) { + # /* map(None, S) is the same as list(S). */ + # return PySequence_List(PyTuple_GetItem(args, 1)); + # } + # } + # + # /* Get space for sequence descriptors. Must NULL out the iterator + # * pointers so that jumping to Fail_2 later doesn't see trash. + # */ + # if ((seqs = PyMem_NEW(sequence, n)) == NULL) { + # PyErr_NoMemory(); + # return NULL; + # } + # for (i = 0; i < n; ++i) { + # seqs[i].it = (PyObject*)NULL; + # seqs[i].saw_StopIteration = 0; + # } + # + # /* Do a first pass to obtain iterators for the arguments, and set len + # * to the largest of their lengths. + # */ + # len = 0; + # for (i = 0, sqp = seqs; i < n; ++i, ++sqp) { + # PyObject *curseq; + # Py_ssize_t curlen; + # + # /* Get iterator. 
*/ + # curseq = PyTuple_GetItem(args, i+1); + # sqp->it = PyObject_GetIter(curseq); + # if (sqp->it == NULL) { + # static char errmsg[] = + # "argument %d to map() must support iteration"; + # char errbuf[sizeof(errmsg) + 25]; + # PyOS_snprintf(errbuf, sizeof(errbuf), errmsg, i+2); + # PyErr_SetString(PyExc_TypeError, errbuf); + # goto Fail_2; + # } + # + # /* Update len. */ + # curlen = _PyObject_LengthHint(curseq, 8); + # if (curlen > len) + # len = curlen; + # } + # + # /* Get space for the result list. */ + # if ((result = (PyObject *) PyList_New(len)) == NULL) + # goto Fail_2; + # + # /* Iterate over the sequences until all have stopped. */ + # for (i = 0; ; ++i) { + # PyObject *alist, *item=NULL, *value; + # int numactive = 0; + # + # if (func == Py_None && n == 1) + # alist = NULL; + # else if ((alist = PyTuple_New(n)) == NULL) + # goto Fail_1; + # + # for (j = 0, sqp = seqs; j < n; ++j, ++sqp) { + # if (sqp->saw_StopIteration) { + # Py_INCREF(Py_None); + # item = Py_None; + # } + # else { + # item = PyIter_Next(sqp->it); + # if (item) + # ++numactive; + # else { + # if (PyErr_Occurred()) { + # Py_XDECREF(alist); + # goto Fail_1; + # } + # Py_INCREF(Py_None); + # item = Py_None; + # sqp->saw_StopIteration = 1; + # } + # } + # if (alist) + # PyTuple_SET_ITEM(alist, j, item); + # else + # break; + # } + # + # if (!alist) + # alist = item; + # + # if (numactive == 0) { + # Py_DECREF(alist); + # break; + # } + # + # if (func == Py_None) + # value = alist; + # else { + # value = PyEval_CallObject(func, alist); + # Py_DECREF(alist); + # if (value == NULL) + # goto Fail_1; + # } + # if (i >= len) { + # int status = PyList_Append(result, value); + # Py_DECREF(value); + # if (status < 0) + # goto Fail_1; + # } + # else if (PyList_SetItem(result, i, value) < 0) + # goto Fail_1; + # } + # + # if (i < len && PyList_SetSlice(result, i, len, NULL) < 0) + # goto Fail_1; + # + # goto Succeed; + # + # Fail_1: + # Py_DECREF(result); + # Fail_2: + # result = NULL; + # Succeed: 
+ # assert(seqs); + # for (i = 0; i < n; ++i) + # Py_XDECREF(seqs[i].it); + # PyMem_DEL(seqs); + # return result; + # } + + def oldrange(*args, **kwargs): + return list(builtins.range(*args, **kwargs)) + + def oldzip(*args, **kwargs): + return list(builtins.zip(*args, **kwargs)) + + filter = oldfilter + map = oldmap + range = oldrange + from functools import reduce + zip = oldzip + __all__ = ['filter', 'map', 'range', 'reduce', 'zip'] + +else: + import __builtin__ + # Python 2-builtin ranges produce lists + filter = __builtin__.filter + map = __builtin__.map + range = __builtin__.range + reduce = __builtin__.reduce + zip = __builtin__.zip + __all__ = [] diff --git a/.install/.kodi/addons/script.module.future/libs/past/tests/__init__.py b/.install/.kodi/addons/script.module.future/libs/past/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/.install/.kodi/addons/script.module.future/libs/past/translation/__init__.py b/.install/.kodi/addons/script.module.future/libs/past/translation/__init__.py new file mode 100644 index 000000000..c7ae2b7a0 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/past/translation/__init__.py @@ -0,0 +1,497 @@ +# -*- coding: utf-8 -*- +""" +past.translation +================== + +The ``past.translation`` package provides an import hook for Python 3 which +transparently runs ``futurize`` fixers over Python 2 code on import to convert +print statements into functions, etc. + +It is intended to assist users in migrating to Python 3.x even if some +dependencies still only support Python 2.x. + +Usage +----- + +Once your Py2 package is installed in the usual module search path, the import +hook is invoked as follows: + + >>> from past import autotranslate + >>> autotranslate('mypackagename') + +Or: + + >>> autotranslate(['mypackage1', 'mypackage2']) + +You can unregister the hook using:: + + >>> from past.translation import remove_hooks + >>> remove_hooks() + +Author: Ed Schofield. 
+Inspired by and based on ``uprefix`` by Vinay M. Sajip. +""" + +import imp +import logging +import marshal +import os +import sys +import copy +from lib2to3.pgen2.parse import ParseError +from lib2to3.refactor import RefactoringTool + +from libfuturize import fixes + + +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) + +myfixes = (list(fixes.libfuturize_fix_names_stage1) + + list(fixes.lib2to3_fix_names_stage1) + + list(fixes.libfuturize_fix_names_stage2) + + list(fixes.lib2to3_fix_names_stage2)) + + +# We detect whether the code is Py2 or Py3 by applying certain lib2to3 fixers +# to it. If the diff is empty, it's Python 3 code. + +py2_detect_fixers = [ +# From stage 1: + 'lib2to3.fixes.fix_apply', + # 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. and move to stage2 + 'lib2to3.fixes.fix_except', + 'lib2to3.fixes.fix_execfile', + 'lib2to3.fixes.fix_exitfunc', + 'lib2to3.fixes.fix_funcattrs', + 'lib2to3.fixes.fix_filter', + 'lib2to3.fixes.fix_has_key', + 'lib2to3.fixes.fix_idioms', + 'lib2to3.fixes.fix_import', # makes any implicit relative imports explicit. 
(Use with ``from __future__ import absolute_import) + 'lib2to3.fixes.fix_intern', + 'lib2to3.fixes.fix_isinstance', + 'lib2to3.fixes.fix_methodattrs', + 'lib2to3.fixes.fix_ne', + 'lib2to3.fixes.fix_numliterals', # turns 1L into 1, 0755 into 0o755 + 'lib2to3.fixes.fix_paren', + 'lib2to3.fixes.fix_print', + 'lib2to3.fixes.fix_raise', # uses incompatible with_traceback() method on exceptions + 'lib2to3.fixes.fix_renames', + 'lib2to3.fixes.fix_reduce', + # 'lib2to3.fixes.fix_set_literal', # this is unnecessary and breaks Py2.6 support + 'lib2to3.fixes.fix_repr', + 'lib2to3.fixes.fix_standarderror', + 'lib2to3.fixes.fix_sys_exc', + 'lib2to3.fixes.fix_throw', + 'lib2to3.fixes.fix_tuple_params', + 'lib2to3.fixes.fix_types', + 'lib2to3.fixes.fix_ws_comma', + 'lib2to3.fixes.fix_xreadlines', + +# From stage 2: + 'lib2to3.fixes.fix_basestring', + # 'lib2to3.fixes.fix_buffer', # perhaps not safe. Test this. + # 'lib2to3.fixes.fix_callable', # not needed in Py3.2+ + # 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. + 'lib2to3.fixes.fix_exec', + # 'lib2to3.fixes.fix_future', # we don't want to remove __future__ imports + 'lib2to3.fixes.fix_getcwdu', + # 'lib2to3.fixes.fix_imports', # called by libfuturize.fixes.fix_future_standard_library + # 'lib2to3.fixes.fix_imports2', # we don't handle this yet (dbm) + # 'lib2to3.fixes.fix_input', + # 'lib2to3.fixes.fix_itertools', + # 'lib2to3.fixes.fix_itertools_imports', + 'lib2to3.fixes.fix_long', + # 'lib2to3.fixes.fix_map', + # 'lib2to3.fixes.fix_metaclass', # causes SyntaxError in Py2! Use the one from ``six`` instead + 'lib2to3.fixes.fix_next', + 'lib2to3.fixes.fix_nonzero', # TODO: add a decorator for mapping __bool__ to __nonzero__ + # 'lib2to3.fixes.fix_operator', # we will need support for this by e.g. 
extending the Py2 operator module to provide those functions in Py3 + 'lib2to3.fixes.fix_raw_input', + # 'lib2to3.fixes.fix_unicode', # strips off the u'' prefix, which removes a potentially helpful source of information for disambiguating unicode/byte strings + # 'lib2to3.fixes.fix_urllib', + 'lib2to3.fixes.fix_xrange', + # 'lib2to3.fixes.fix_zip', +] + + +class RTs: + """ + A namespace for the refactoring tools. This avoids creating these at + the module level, which slows down the module import. (See issue #117). + + There are two possible grammars: with or without the print statement. + Hence we have two possible refactoring tool implementations. + """ + _rt = None + _rtp = None + _rt_py2_detect = None + _rtp_py2_detect = None + + @staticmethod + def setup(): + """ + Call this before using the refactoring tools to create them on demand + if needed. + """ + if None in [RTs._rt, RTs._rtp]: + RTs._rt = RefactoringTool(myfixes) + RTs._rtp = RefactoringTool(myfixes, {'print_function': True}) + + + @staticmethod + def setup_detect_python2(): + """ + Call this before using the refactoring tools to create them on demand + if needed. + """ + if None in [RTs._rt_py2_detect, RTs._rtp_py2_detect]: + RTs._rt_py2_detect = RefactoringTool(py2_detect_fixers) + RTs._rtp_py2_detect = RefactoringTool(py2_detect_fixers, + {'print_function': True}) + + +# We need to find a prefix for the standard library, as we don't want to +# process any files there (they will already be Python 3). +# +# The following method is used by Sanjay Vinip in uprefix. This fails for +# ``conda`` environments: +# # In a non-pythonv virtualenv, sys.real_prefix points to the installed Python. +# # In a pythonv venv, sys.base_prefix points to the installed Python. +# # Outside a virtual environment, sys.prefix points to the installed Python. 
+ +# if hasattr(sys, 'real_prefix'): +# _syslibprefix = sys.real_prefix +# else: +# _syslibprefix = getattr(sys, 'base_prefix', sys.prefix) + +# Instead, we use the portion of the path common to both the stdlib modules +# ``math`` and ``urllib``. + +def splitall(path): + """ + Split a path into all components. From Python Cookbook. + """ + allparts = [] + while True: + parts = os.path.split(path) + if parts[0] == path: # sentinel for absolute paths + allparts.insert(0, parts[0]) + break + elif parts[1] == path: # sentinel for relative paths + allparts.insert(0, parts[1]) + break + else: + path = parts[0] + allparts.insert(0, parts[1]) + return allparts + + +def common_substring(s1, s2): + """ + Returns the longest common substring to the two strings, starting from the + left. + """ + chunks = [] + path1 = splitall(s1) + path2 = splitall(s2) + for (dir1, dir2) in zip(path1, path2): + if dir1 != dir2: + break + chunks.append(dir1) + return os.path.join(*chunks) + +# _stdlibprefix = common_substring(math.__file__, urllib.__file__) + + +def detect_python2(source, pathname): + """ + Returns a bool indicating whether we think the code is Py2 + """ + RTs.setup_detect_python2() + try: + tree = RTs._rt_py2_detect.refactor_string(source, pathname) + except ParseError as e: + if e.msg != 'bad input' or e.value != '=': + raise + tree = RTs._rtp.refactor_string(source, pathname) + + if source != str(tree)[:-1]: # remove added newline + # The above fixers made changes, so we conclude it's Python 2 code + logger.debug('Detected Python 2 code: {0}'.format(pathname)) + with open('/tmp/original_code.py', 'w') as f: + f.write('### Original code (detected as py2): %s\n%s' % + (pathname, source)) + with open('/tmp/py2_detection_code.py', 'w') as f: + f.write('### Code after running py3 detection (from %s)\n%s' % + (pathname, str(tree)[:-1])) + return True + else: + logger.debug('Detected Python 3 code: {0}'.format(pathname)) + with open('/tmp/original_code.py', 'w') as f: + 
f.write('### Original code (detected as py3): %s\n%s' % + (pathname, source)) + try: + os.remove('/tmp/futurize_code.py') + except OSError: + pass + return False + + +class Py2Fixer(object): + """ + An import hook class that uses lib2to3 for source-to-source translation of + Py2 code to Py3. + """ + + # See the comments on :class:future.standard_library.RenameImport. + # We add this attribute here so remove_hooks() and install_hooks() can + # unambiguously detect whether the import hook is installed: + PY2FIXER = True + + def __init__(self): + self.found = None + self.base_exclude_paths = ['future', 'past'] + self.exclude_paths = copy.copy(self.base_exclude_paths) + self.include_paths = [] + + def include(self, paths): + """ + Pass in a sequence of module names such as 'plotrique.plotting' that, + if present at the leftmost side of the full package name, would + specify the module to be transformed from Py2 to Py3. + """ + self.include_paths += paths + + def exclude(self, paths): + """ + Pass in a sequence of strings such as 'mymodule' that, if + present at the leftmost side of the full package name, would cause + the module not to undergo any source transformation. + """ + self.exclude_paths += paths + + def find_module(self, fullname, path=None): + logger.debug('Running find_module: {0}...'.format(fullname)) + if '.' in fullname: + parent, child = fullname.rsplit('.', 1) + if path is None: + loader = self.find_module(parent, path) + mod = loader.load_module(parent) + path = mod.__path__ + fullname = child + + # Perhaps we should try using the new importlib functionality in Python + # 3.3: something like this? 
+ # thing = importlib.machinery.PathFinder.find_module(fullname, path) + try: + self.found = imp.find_module(fullname, path) + except Exception as e: + logger.debug('Py2Fixer could not find {0}') + logger.debug('Exception was: {0})'.format(fullname, e)) + return None + self.kind = self.found[-1][-1] + if self.kind == imp.PKG_DIRECTORY: + self.pathname = os.path.join(self.found[1], '__init__.py') + elif self.kind == imp.PY_SOURCE: + self.pathname = self.found[1] + return self + + def transform(self, source): + # This implementation uses lib2to3, + # you can override and use something else + # if that's better for you + + # lib2to3 likes a newline at the end + RTs.setup() + source += '\n' + try: + tree = RTs._rt.refactor_string(source, self.pathname) + except ParseError as e: + if e.msg != 'bad input' or e.value != '=': + raise + tree = RTs._rtp.refactor_string(source, self.pathname) + # could optimise a bit for only doing str(tree) if + # getattr(tree, 'was_changed', False) returns True + return str(tree)[:-1] # remove added newline + + def load_module(self, fullname): + logger.debug('Running load_module for {0}...'.format(fullname)) + if fullname in sys.modules: + mod = sys.modules[fullname] + else: + if self.kind in (imp.PY_COMPILED, imp.C_EXTENSION, imp.C_BUILTIN, + imp.PY_FROZEN): + convert = False + # elif (self.pathname.startswith(_stdlibprefix) + # and 'site-packages' not in self.pathname): + # # We assume it's a stdlib package in this case. Is this too brittle? + # # Please file a bug report at https://github.com/PythonCharmers/python-future + # # if so. 
+ # convert = False + # in theory, other paths could be configured to be excluded here too + elif any([fullname.startswith(path) for path in self.exclude_paths]): + convert = False + elif any([fullname.startswith(path) for path in self.include_paths]): + convert = True + else: + convert = False + if not convert: + logger.debug('Excluded {0} from translation'.format(fullname)) + mod = imp.load_module(fullname, *self.found) + else: + logger.debug('Autoconverting {0} ...'.format(fullname)) + mod = imp.new_module(fullname) + sys.modules[fullname] = mod + + # required by PEP 302 + mod.__file__ = self.pathname + mod.__name__ = fullname + mod.__loader__ = self + + # This: + # mod.__package__ = '.'.join(fullname.split('.')[:-1]) + # seems to result in "SystemError: Parent module '' not loaded, + # cannot perform relative import" for a package's __init__.py + # file. We use the approach below. Another option to try is the + # minimal load_module pattern from the PEP 302 text instead. + + # Is the test in the next line more or less robust than the + # following one? Presumably less ... + # ispkg = self.pathname.endswith('__init__.py') + + if self.kind == imp.PKG_DIRECTORY: + mod.__path__ = [ os.path.dirname(self.pathname) ] + mod.__package__ = fullname + else: + #else, regular module + mod.__path__ = [] + mod.__package__ = fullname.rpartition('.')[0] + + try: + cachename = imp.cache_from_source(self.pathname) + if not os.path.exists(cachename): + update_cache = True + else: + sourcetime = os.stat(self.pathname).st_mtime + cachetime = os.stat(cachename).st_mtime + update_cache = cachetime < sourcetime + # # Force update_cache to work around a problem with it being treated as Py3 code??? + # update_cache = True + if not update_cache: + with open(cachename, 'rb') as f: + data = f.read() + try: + code = marshal.loads(data) + except Exception: + # pyc could be corrupt. 
Regenerate it + update_cache = True + if update_cache: + if self.found[0]: + source = self.found[0].read() + elif self.kind == imp.PKG_DIRECTORY: + with open(self.pathname) as f: + source = f.read() + + if detect_python2(source, self.pathname): + source = self.transform(source) + with open('/tmp/futurized_code.py', 'w') as f: + f.write('### Futurized code (from %s)\n%s' % + (self.pathname, source)) + + code = compile(source, self.pathname, 'exec') + + dirname = os.path.dirname(cachename) + try: + if not os.path.exists(dirname): + os.makedirs(dirname) + with open(cachename, 'wb') as f: + data = marshal.dumps(code) + f.write(data) + except Exception: # could be write-protected + pass + exec(code, mod.__dict__) + except Exception as e: + # must remove module from sys.modules + del sys.modules[fullname] + raise # keep it simple + + if self.found[0]: + self.found[0].close() + return mod + +_hook = Py2Fixer() + + +def install_hooks(include_paths=(), exclude_paths=()): + if isinstance(include_paths, str): + include_paths = (include_paths,) + if isinstance(exclude_paths, str): + exclude_paths = (exclude_paths,) + assert len(include_paths) + len(exclude_paths) > 0, 'Pass at least one argument' + _hook.include(include_paths) + _hook.exclude(exclude_paths) + # _hook.debug = debug + enable = sys.version_info[0] >= 3 # enabled for all 3.x + if enable and _hook not in sys.meta_path: + sys.meta_path.insert(0, _hook) # insert at beginning. This could be made a parameter + + # We could return the hook when there are ways of configuring it + #return _hook + + +def remove_hooks(): + if _hook in sys.meta_path: + sys.meta_path.remove(_hook) + + +def detect_hooks(): + """ + Returns True if the import hooks are installed, False if not. + """ + return _hook in sys.meta_path + # present = any([hasattr(hook, 'PY2FIXER') for hook in sys.meta_path]) + # return present + + +class hooks(object): + """ + Acts as a context manager. 
Use like this: + + >>> from past import translation + >>> with translation.hooks(): + ... import mypy2module + >>> import requests # py2/3 compatible anyway + >>> # etc. + """ + def __enter__(self): + self.hooks_were_installed = detect_hooks() + install_hooks() + return self + + def __exit__(self, *args): + if not self.hooks_were_installed: + remove_hooks() + + +class suspend_hooks(object): + """ + Acts as a context manager. Use like this: + + >>> from past import translation + >>> translation.install_hooks() + >>> import http.client + >>> # ... + >>> with translation.suspend_hooks(): + >>> import requests # or others that support Py2/3 + + If the hooks were disabled before the context, they are not installed when + the context is left. + """ + def __enter__(self): + self.hooks_were_installed = detect_hooks() + remove_hooks() + return self + def __exit__(self, *args): + if self.hooks_were_installed: + install_hooks() diff --git a/.install/.kodi/addons/script.module.future/libs/past/types/__init__.py b/.install/.kodi/addons/script.module.future/libs/past/types/__init__.py new file mode 100644 index 000000000..91dd270f2 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/past/types/__init__.py @@ -0,0 +1,29 @@ +""" +Forward-ports of types from Python 2 for use with Python 3: + +- ``basestring``: equivalent to ``(str, bytes)`` in ``isinstance`` checks +- ``dict``: with list-producing .keys() etc. 
methods +- ``str``: bytes-like, but iterating over them doesn't product integers +- ``long``: alias of Py3 int with ``L`` suffix in the ``repr`` +- ``unicode``: alias of Py3 str with ``u`` prefix in the ``repr`` + +""" + +from past import utils + +if utils.PY2: + import __builtin__ + basestring = __builtin__.basestring + dict = __builtin__.dict + str = __builtin__.str + long = __builtin__.long + unicode = __builtin__.unicode + __all__ = [] +else: + from .basestring import basestring + from .olddict import olddict + from .oldstr import oldstr + long = int + unicode = str + # from .unicode import unicode + __all__ = ['basestring', 'olddict', 'oldstr', 'long', 'unicode'] diff --git a/.install/.kodi/addons/script.module.future/libs/past/types/basestring.py b/.install/.kodi/addons/script.module.future/libs/past/types/basestring.py new file mode 100644 index 000000000..1cab22f6c --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/past/types/basestring.py @@ -0,0 +1,39 @@ +""" +An implementation of the basestring type for Python 3 + +Example use: + +>>> s = b'abc' +>>> assert isinstance(s, basestring) +>>> from past.types import str as oldstr +>>> s2 = oldstr(b'abc') +>>> assert isinstance(s2, basestring) + +""" + +import sys + +from past.utils import with_metaclass, PY2 + +if PY2: + str = unicode + +ver = sys.version_info[:2] + + +class BaseBaseString(type): + def __instancecheck__(cls, instance): + return isinstance(instance, (bytes, str)) + + def __subclasshook__(cls, thing): + # TODO: What should go here? 
+ raise NotImplemented + + +class basestring(with_metaclass(BaseBaseString)): + """ + A minimal backport of the Python 2 basestring type to Py3 + """ + + +__all__ = ['basestring'] diff --git a/.install/.kodi/addons/script.module.future/libs/past/types/olddict.py b/.install/.kodi/addons/script.module.future/libs/past/types/olddict.py new file mode 100644 index 000000000..f4f92a26a --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/past/types/olddict.py @@ -0,0 +1,96 @@ +""" +A dict subclass for Python 3 that behaves like Python 2's dict + +Example use: + +>>> from past.builtins import dict +>>> d1 = dict() # instead of {} for an empty dict +>>> d2 = dict(key1='value1', key2='value2') + +The keys, values and items methods now return lists on Python 3.x and there are +methods for iterkeys, itervalues, iteritems, and viewkeys etc. + +>>> for d in (d1, d2): +... assert isinstance(d.keys(), list) +... assert isinstance(d.values(), list) +... assert isinstance(d.items(), list) +""" + +import sys + +from past.utils import with_metaclass + + +_builtin_dict = dict +ver = sys.version_info[:2] + + +class BaseOldDict(type): + def __instancecheck__(cls, instance): + return isinstance(instance, _builtin_dict) + + +class olddict(with_metaclass(BaseOldDict, _builtin_dict)): + """ + A backport of the Python 3 dict object to Py2 + """ + iterkeys = _builtin_dict.keys + viewkeys = _builtin_dict.keys + + def keys(self): + return list(super(olddict, self).keys()) + + itervalues = _builtin_dict.values + viewvalues = _builtin_dict.values + + def values(self): + return list(super(olddict, self).values()) + + iteritems = _builtin_dict.items + viewitems = _builtin_dict.items + + def items(self): + return list(super(olddict, self).items()) + + def has_key(self, k): + """ + D.has_key(k) -> True if D has a key k, else False + """ + return k in self + + # def __new__(cls, *args, **kwargs): + # """ + # dict() -> new empty dictionary + # dict(mapping) -> new dictionary initialized 
from a mapping object's + # (key, value) pairs + # dict(iterable) -> new dictionary initialized as if via: + # d = {} + # for k, v in iterable: + # d[k] = v + # dict(**kwargs) -> new dictionary initialized with the name=value pairs + # in the keyword argument list. For example: dict(one=1, two=2) + + # """ + # + # if len(args) == 0: + # return super(olddict, cls).__new__(cls) + # # Was: elif isinstance(args[0], newbytes): + # # We use type() instead of the above because we're redefining + # # this to be True for all unicode string subclasses. Warning: + # # This may render newstr un-subclassable. + # elif type(args[0]) == olddict: + # return args[0] + # # elif isinstance(args[0], _builtin_dict): + # # value = args[0] + # else: + # value = args[0] + # return super(olddict, cls).__new__(cls, value) + + def __native__(self): + """ + Hook for the past.utils.native() function + """ + return super(oldbytes, self) + + +__all__ = ['olddict'] diff --git a/.install/.kodi/addons/script.module.future/libs/past/types/oldstr.py b/.install/.kodi/addons/script.module.future/libs/past/types/oldstr.py new file mode 100644 index 000000000..7768d3284 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/past/types/oldstr.py @@ -0,0 +1,132 @@ +""" +Pure-Python implementation of a Python 2-like str object for Python 3. +""" + +from collections import Iterable +from numbers import Integral + +from past.utils import PY2, with_metaclass + + +_builtin_bytes = bytes + + +class BaseOldStr(type): + def __instancecheck__(cls, instance): + return isinstance(instance, _builtin_bytes) + + +def unescape(s): + """ + Interprets strings with escape sequences + + Example: + >>> s = unescape(r'abc\\def') # i.e. 
'abc\\\\def' + >>> print(s) + 'abc\def' + >>> s2 = unescape('abc\\ndef') + >>> len(s2) + 8 + >>> print(s2) + abc + def + """ + return s.encode().decode('unicode_escape') + + +class oldstr(with_metaclass(BaseOldStr, _builtin_bytes)): + """ + A forward port of the Python 2 8-bit string object to Py3 + """ + # Python 2 strings have no __iter__ method: + @property + def __iter__(self): + raise AttributeError + + def __dir__(self): + return [thing for thing in dir(_builtin_bytes) if thing != '__iter__'] + + # def __new__(cls, *args, **kwargs): + # """ + # From the Py3 bytes docstring: + + # bytes(iterable_of_ints) -> bytes + # bytes(string, encoding[, errors]) -> bytes + # bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer + # bytes(int) -> bytes object of size given by the parameter initialized with null bytes + # bytes() -> empty bytes object + # + # Construct an immutable array of bytes from: + # - an iterable yielding integers in range(256) + # - a text string encoded using the specified encoding + # - any object implementing the buffer API. + # - an integer + # """ + # + # if len(args) == 0: + # return super(newbytes, cls).__new__(cls) + # # Was: elif isinstance(args[0], newbytes): + # # We use type() instead of the above because we're redefining + # # this to be True for all unicode string subclasses. Warning: + # # This may render newstr un-subclassable. 
+ # elif type(args[0]) == newbytes: + # return args[0] + # elif isinstance(args[0], _builtin_bytes): + # value = args[0] + # elif isinstance(args[0], unicode): + # if 'encoding' not in kwargs: + # raise TypeError('unicode string argument without an encoding') + # ### + # # Was: value = args[0].encode(**kwargs) + # # Python 2.6 string encode() method doesn't take kwargs: + # # Use this instead: + # newargs = [kwargs['encoding']] + # if 'errors' in kwargs: + # newargs.append(kwargs['errors']) + # value = args[0].encode(*newargs) + # ### + # elif isinstance(args[0], Iterable): + # if len(args[0]) == 0: + # # What is this? + # raise ValueError('unknown argument type') + # elif len(args[0]) > 0 and isinstance(args[0][0], Integral): + # # It's a list of integers + # value = b''.join([chr(x) for x in args[0]]) + # else: + # raise ValueError('item cannot be interpreted as an integer') + # elif isinstance(args[0], Integral): + # if args[0] < 0: + # raise ValueError('negative count') + # value = b'\x00' * args[0] + # else: + # value = args[0] + # return super(newbytes, cls).__new__(cls, value) + + def __repr__(self): + s = super(oldstr, self).__repr__() # e.g. b'abc' on Py3, b'abc' on Py3 + return s[1:] + + def __str__(self): + s = super(oldstr, self).__str__() # e.g. "b'abc'" or "b'abc\\ndef' + # TODO: fix this: + assert s[:2] == "b'" and s[-1] == "'" + return unescape(s[2:-1]) # e.g. 
'abc' or 'abc\ndef' + + def __getitem__(self, y): + if isinstance(y, Integral): + return super(oldstr, self).__getitem__(slice(y, y+1)) + else: + return super(oldstr, self).__getitem__(y) + + def __getslice__(self, *args): + return self.__getitem__(slice(*args)) + + def __contains__(self, key): + if isinstance(key, int): + return False + + def __native__(self): + return bytes(self) + + +__all__ = ['oldstr'] diff --git a/.install/.kodi/addons/script.module.future/libs/past/utils/__init__.py b/.install/.kodi/addons/script.module.future/libs/past/utils/__init__.py new file mode 100644 index 000000000..c6606d0b9 --- /dev/null +++ b/.install/.kodi/addons/script.module.future/libs/past/utils/__init__.py @@ -0,0 +1,97 @@ +""" +Various non-built-in utility functions and definitions for Py2 +compatibility in Py3. + +For example: + + >>> # The old_div() function behaves like Python 2's / operator + >>> # without "from __future__ import division" + >>> from past.utils import old_div + >>> old_div(3, 2) # like 3/2 in Py2 + 0 + >>> old_div(3, 2.0) # like 3/2.0 in Py2 + 1.5 +""" + +import sys +import numbers + +PY3 = sys.version_info[0] == 3 +PY2 = sys.version_info[0] == 2 +PYPY = hasattr(sys, 'pypy_translation_info') + + +def with_metaclass(meta, *bases): + """ + Function from jinja2/_compat.py. License: BSD. + + Use it like this:: + + class BaseForm(object): + pass + + class FormType(type): + pass + + class Form(with_metaclass(FormType, BaseForm)): + pass + + This requires a bit of explanation: the basic idea is to make a + dummy metaclass for one level of class instantiation that replaces + itself with the actual metaclass. Because of internal type checks + we also need to make sure that we downgrade the custom metaclass + for one level to something closer to type (that's why __call__ and + __init__ comes back from type etc.). + + This has the advantage over six.with_metaclass of not introducing + dummy classes into the final MRO. 
+ """ + class metaclass(meta): + __call__ = type.__call__ + __init__ = type.__init__ + def __new__(cls, name, this_bases, d): + if this_bases is None: + return type.__new__(cls, name, (), d) + return meta(name, bases, d) + return metaclass('temporary_class', None, {}) + + +def native(obj): + """ + On Py2, this is a no-op: native(obj) -> obj + + On Py3, returns the corresponding native Py3 types that are + superclasses for forward-ported objects from Py2: + + >>> from past.builtins import str, dict + + >>> native(str(b'ABC')) # Output on Py3 follows. On Py2, output is 'ABC' + b'ABC' + >>> type(native(str(b'ABC'))) + bytes + + Existing native types on Py3 will be returned unchanged: + + >>> type(native(b'ABC')) + bytes + """ + if hasattr(obj, '__native__'): + return obj.__native__() + else: + return obj + + +# An alias for future.utils.old_div(): +def old_div(a, b): + """ + Equivalent to ``a / b`` on Python 2 without ``from __future__ import + division``. + + TODO: generalize this to other objects (like arrays etc.) + """ + if isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral): + return a // b + else: + return a / b + +__all__ = ['PY3', 'PY2', 'PYPY', 'with_metaclass', 'native', 'old_div'] diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/README.md b/.install/.kodi/addons/script.module.inputstreamhelper/README.md index 45be83155..bbdabb9ca 100644 --- a/.install/.kodi/addons/script.module.inputstreamhelper/README.md +++ b/.install/.kodi/addons/script.module.inputstreamhelper/README.md @@ -90,6 +90,9 @@ Please report any issues or bug reports on the [GitHub Issues](https://github.co This module is licensed under the **The MIT License**. Please see the [LICENSE.txt](LICENSE.txt) file for details. 
## Releases +### v0.5.2 (2020-12-13) +- Update Chrome OS ARM hardware id's (@mediaminister) + ### v0.5.1 (2020-10-02) - Fix incorrect ARM HWIDs: PHASER and PHASER360 (@dagwieers) - Added Hebrew translations (@haggaie) diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/addon.xml b/.install/.kodi/addons/script.module.inputstreamhelper/addon.xml index 67de6bb47..92a78707b 100644 --- a/.install/.kodi/addons/script.module.inputstreamhelper/addon.xml +++ b/.install/.kodi/addons/script.module.inputstreamhelper/addon.xml @@ -1,5 +1,5 @@ - + @@ -23,6 +23,9 @@ Un simple module Kodi qui simplifie la vie des développeurs de modules complémentaires en s’appuyant sur des modules complémentaires basés sur InputStream et sur la lecture de DRM. Un módulo Kodi simple que hace la vida más fácil para los desarrolladores de complementos que dependen de complementos basados en InputStream y reproducción de DRM. +v0.5.2 (2020-12-13) +- Update Chrome OS ARM hardware id's + v0.5.1 (2020-10-02) - Fix incorrect ARM HWIDs: PHASER and PHASER360 - Added Hebrew translations diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/__init__.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/__init__.pyo new file mode 100644 index 000000000..e739c5e20 Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/__init__.pyo differ diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.py b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.py index 67fdec006..cbbf45ada 100644 --- a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.py +++ b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.py @@ -79,36 +79,24 @@ WIDEVINE_CONFIG_NAME = 'manifest.json' CHROMEOS_RECOVERY_URL = 'https://dl.google.com/dl/edgedl/chromeos/recovery/recovery.json' -# 
Last updated: 2019-08-20 (version 12239.67.0) +# To keep the Chrome OS ARM hardware ID list up to date, the following resources can be used: +# https://www.chromium.org/chromium-os/developer-information-for-chrome-os-devices +# https://cros-updates-serving.appspot.com/ +# Last updated: 2020-10-05 CHROMEOS_RECOVERY_ARM_HWIDS = [ - # 'ARKHAM', - 'BIG', - 'BLAZE', 'BOB', - # 'DAISY', 'DRUWL', 'DUMO', 'ELM', - 'EXPRESSO', 'FIEVEL', 'HANA', - 'JAQ', - 'JERRY', + 'JUNIPER-HVPU', 'KEVIN', - 'KITTY', + 'KODAMA', + 'KRANE-ZDKS', 'MICKEY', - 'MIGHTY', - 'MINNIE', - 'PI', - 'PIT', - 'RELM', 'SCARLET', - 'SKATE', - 'SNOW', - 'SPEEDY', - 'SPRING', 'TIGER', - # 'WHIRLWIND', ] CHROMEOS_BLOCK_SIZE = 512 diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.pyo new file mode 100644 index 000000000..944d0702c Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/config.pyo differ diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.py b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.py index aa107c473..0cc292578 100644 --- a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.py +++ b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.py @@ -7,6 +7,13 @@ from contextlib import contextmanager import xbmc import xbmcaddon from xbmcgui import DialogProgress, DialogProgressBG + +try: # Kodi v19 or newer + from xbmcvfs import translatePath +except ImportError: # Kodi v18 and older + # pylint: disable=ungrouped-imports + from xbmc import translatePath + from .unicodes import from_unicode, to_unicode # NOTE: We need to explicitly add the add-on id here! 
@@ -60,7 +67,7 @@ def kodi_version_major(): def translate_path(path): """Translate special xbmc paths""" - return to_unicode(xbmc.translatePath(from_unicode(path))) + return to_unicode(translatePath(from_unicode(path))) def get_addon_info(key): @@ -186,14 +193,11 @@ def get_setting_int(key, default=None): def get_setting_float(key, default=None): """Get an add-on setting as float""" + value = get_setting(key, default) try: - return ADDON.getSettingNumber(key) - except (AttributeError, TypeError): # On Krypton or older, or when not a float - value = get_setting(key, default) - try: - return float(value) - except ValueError: - return default + return float(value) + except ValueError: + return default except RuntimeError: # Occurs when the add-on is disabled return default diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.pyo new file mode 100644 index 000000000..b7669f765 Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/kodiutils.pyo differ diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/unicodes.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/unicodes.pyo new file mode 100644 index 000000000..06bd760a0 Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/unicodes.pyo differ diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/utils.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/utils.pyo new file mode 100644 index 000000000..5a7114acc Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/utils.pyo differ diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/__init__.pyo 
b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/__init__.pyo new file mode 100644 index 000000000..e225c7f33 Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/__init__.pyo differ diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.py b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.py index 09b13fac7..fd6d3977e 100644 --- a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.py +++ b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.py @@ -147,6 +147,7 @@ def install_widevine_arm(backup_path): localize(30018, diskspace=sizeof_fmt(required_diskspace))) return False + log(2, 'Downloading best ChromeOS image for Widevine: {hwid} ({version})'.format(**arm_device)) url = arm_device['url'] downloaded = http_download(url, message=localize(30022), checksum=arm_device['sha1'], hash_alg='sha1', dl_size=int(arm_device['zipfilesize'])) # Downloading the recovery image diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.pyo new file mode 100644 index 000000000..435a439f5 Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm.pyo differ diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm_chromeos.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm_chromeos.pyo new file mode 100644 index 000000000..478fcb386 Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/arm_chromeos.pyo differ diff --git 
a/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/widevine.pyo b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/widevine.pyo new file mode 100644 index 000000000..5c3a4c009 Binary files /dev/null and b/.install/.kodi/addons/script.module.inputstreamhelper/lib/inputstreamhelper/widevine/widevine.pyo differ diff --git a/.install/.kodi/addons/script.module.inputstreamhelper/resources/settings.xml b/.install/.kodi/addons/script.module.inputstreamhelper/resources/settings.xml index 53b42eb1a..4a1dfe3a3 100644 --- a/.install/.kodi/addons/script.module.inputstreamhelper/resources/settings.xml +++ b/.install/.kodi/addons/script.module.inputstreamhelper/resources/settings.xml @@ -1,9 +1,9 @@ - - - + + + diff --git a/.install/.kodi/addons/script.module.parsedom/addon.xml b/.install/.kodi/addons/script.module.parsedom/addon.xml new file mode 100644 index 000000000..e0ee2f436 --- /dev/null +++ b/.install/.kodi/addons/script.module.parsedom/addon.xml @@ -0,0 +1,20 @@ + + + + + + + all + Parsedom for xbmc plugins. 
+ + + + GPLv3 + all + + + http://forum.xbmc.org/showthread.php?tid=116498 + https://github.com/HenrikDK/xbmc-common-plugin-functions + + + \ No newline at end of file diff --git a/.install/.kodi/addons/script.module.parsedom/changelog.txt b/.install/.kodi/addons/script.module.parsedom/changelog.txt new file mode 100644 index 000000000..1d0d94bc8 --- /dev/null +++ b/.install/.kodi/addons/script.module.parsedom/changelog.txt @@ -0,0 +1,30 @@ +[B]Version 1.5.0[/B] +- Fixed: proper fix for getParameters that only affects Frodo branch +- Added: new function to get the version of xbmc as a float + + +[B]Version 1.4.0[/B] +- Special fix for eden branch to unbreak changes for Frodo + +[B]Version 1.3.0[/B] +- Team xbmc decided to stop unquote-ing their path strings, so getParams now does it for them + +[B]Version 1.2.0[/B] +- fetchPage should default to utf-8 encoding +- parseDOM should handle utf-8 encoding + +[B]Version 1.1.0[/B] +- Handle \t that breaks DOM variable extraction +- Added extractJS function + +[B]Version 1.0.0[/B] +- Minor fixes + +[B]Version 0.9.1[/B] +- Stability and more functions +- Add cookie support to fetchPage. +- Add getCookieInfoAsHTML. +- Add POST and Refering capabilities to fetchPage + +[B]Version 0.9.0[/B] +- Initial public test run. 
diff --git a/.install/.kodi/addons/script.module.parsedom/icon.png b/.install/.kodi/addons/script.module.parsedom/icon.png new file mode 100644 index 000000000..ca4c53adb Binary files /dev/null and b/.install/.kodi/addons/script.module.parsedom/icon.png differ diff --git a/.install/.kodi/addons/script.module.parsedom/lib/CommonFunctions.py b/.install/.kodi/addons/script.module.parsedom/lib/CommonFunctions.py new file mode 100644 index 000000000..a6e5253eb --- /dev/null +++ b/.install/.kodi/addons/script.module.parsedom/lib/CommonFunctions.py @@ -0,0 +1,558 @@ +''' + Parsedom for XBMC plugins + Copyright (C) 2010-2011 Tobias Ussing And Henrik Mosgaard Jensen + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . 
+''' + +import sys +import urllib +import urllib2 +import re +import io +import inspect +import time +import HTMLParser +#import chardet +import json + +version = u"2.5.1" +plugin = u"CommonFunctions-" + version +print plugin + +USERAGENT = u"Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1" + +if hasattr(sys.modules["__main__"], "xbmc"): + xbmc = sys.modules["__main__"].xbmc +else: + import xbmc + +if hasattr(sys.modules["__main__"], "xbmcgui"): + xbmcgui = sys.modules["__main__"].xbmcgui +else: + import xbmcgui + +if hasattr(sys.modules["__main__"], "dbg"): + dbg = sys.modules["__main__"].dbg +else: + dbg = False + +if hasattr(sys.modules["__main__"], "dbglevel"): + dbglevel = sys.modules["__main__"].dbglevel +else: + dbglevel = 3 + +if hasattr(sys.modules["__main__"], "opener"): + urllib2.install_opener(sys.modules["__main__"].opener) + + +# This function raises a keyboard for user input +def getUserInput(title=u"Input", default=u"", hidden=False): + log("", 5) + result = None + + # Fix for when this functions is called with default=None + if not default: + default = u"" + + keyboard = xbmc.Keyboard(default, title) + keyboard.setHiddenInput(hidden) + keyboard.doModal() + + if keyboard.isConfirmed(): + result = keyboard.getText() + + log(repr(result), 5) + return result + + +# This function raises a keyboard numpad for user input +def getUserInputNumbers(title=u"Input", default=u""): + log("", 5) + result = None + + # Fix for when this functions is called with default=None + if not default: + default = u"" + + keyboard = xbmcgui.Dialog() + result = keyboard.numeric(0, title, default) + + log(repr(result), 5) + return str(result) + + +def getXBMCVersion(): + log("", 3) + version = xbmc.getInfoLabel( "System.BuildVersion" ) + log(version, 3) + for key in ["-", " "]: + if version.find(key) -1: + version = version[:version.find(key)] + version = float(version) + log(repr(version)) + return version + +# Converts the request url passed 
on by xbmc to the plugin into a dict of key-value pairs +def getParameters(parameterString): + log("", 5) + commands = {} + if getXBMCVersion() >= 12.0: + parameterString = urllib.unquote_plus(parameterString) + splitCommands = parameterString[parameterString.find('?') + 1:].split('&') + + for command in splitCommands: + if (len(command) > 0): + splitCommand = command.split('=') + key = splitCommand[0] + try: + value = splitCommand[1].encode("utf-8") + except: + log("Error utf-8 encoding argument value: " + repr(splitCommand[1])) + value = splitCommand[1] + + commands[key] = value + + log(repr(commands), 5) + return commands + + +def replaceHTMLCodes(txt): + log(repr(txt), 5) + + # Fix missing ; in &#; + txt = re.sub("(&#[0-9]+)([^;^0-9]+)", "\\1;\\2", makeUTF8(txt)) + + txt = HTMLParser.HTMLParser().unescape(txt) + txt = txt.replace("&", "&") + log(repr(txt), 5) + return txt + + +def stripTags(html): + log(repr(html), 5) + sub_start = html.find("<") + sub_end = html.find(">") + while sub_start < sub_end and sub_start > -1: + html = html.replace(html[sub_start:sub_end + 1], "").strip() + sub_start = html.find("<") + sub_end = html.find(">") + + log(repr(html), 5) + return html + + +def _getDOMContent(html, name, match, ret): # Cleanup + log("match: " + match, 3) + + endstr = u"" + + start = html.find(match) + end = html.find(endstr, start) + pos = html.find("<" + name, start + 1 ) + + log(str(start) + " < " + str(end) + ", pos = " + str(pos) + ", endpos: " + str(end), 8) + + while pos < end and pos != -1: # Ignore too early return + tend = html.find(endstr, end + len(endstr)) + if tend != -1: + end = tend + pos = html.find("<" + name, pos + 1) + log("loop: " + str(start) + " < " + str(end) + " pos = " + str(pos), 8) + + log("start: %s, len: %s, end: %s" % (start, len(match), end), 3) + if start == -1 and end == -1: + result = u"" + elif start > -1 and end > -1: + result = html[start + len(match):end] + elif end > -1: + result = html[:end] + elif start > -1: + 
result = html[start + len(match):] + + if ret: + endstr = html[end:html.find(">", html.find(endstr)) + 1] + result = match + result + endstr + + log("done result length: " + str(len(result)), 3) + return result + +def _getDOMAttributes(match, name, ret): + log("", 3) + + lst = re.compile('<' + name + '.*?' + ret + '=([\'"].[^>]*?[\'"])>', re.M | re.S).findall(match) + if len(lst) == 0: + lst = re.compile('<' + name + '.*?' + ret + '=(.[^>]*?)>', re.M | re.S).findall(match) + ret = [] + for tmp in lst: + cont_char = tmp[0] + if cont_char in "'\"": + log("Using %s as quotation mark" % cont_char, 3) + + # Limit down to next variable. + if tmp.find('=' + cont_char, tmp.find(cont_char, 1)) > -1: + tmp = tmp[:tmp.find('=' + cont_char, tmp.find(cont_char, 1))] + + # Limit to the last quotation mark + if tmp.rfind(cont_char, 1) > -1: + tmp = tmp[1:tmp.rfind(cont_char)] + else: + log("No quotation mark found", 3) + if tmp.find(" ") > 0: + tmp = tmp[:tmp.find(" ")] + elif tmp.find("/") > 0: + tmp = tmp[:tmp.find("/")] + elif tmp.find(">") > 0: + tmp = tmp[:tmp.find(">")] + + ret.append(tmp.strip()) + + log("Done: " + repr(ret), 3) + return ret + +def _getDOMElements(item, name, attrs): + log("", 3) + + lst = [] + for key in attrs: + lst2 = re.compile('(<' + name + '[^>]*?(?:' + key + '=[\'"]' + attrs[key] + '[\'"].*?>))', re.M | re.S).findall(item) + if len(lst2) == 0 and attrs[key].find(" ") == -1: # Try matching without quotation marks + lst2 = re.compile('(<' + name + '[^>]*?(?:' + key + '=' + attrs[key] + '.*?>))', re.M | re.S).findall(item) + + if len(lst) == 0: + log("Setting main list " + repr(lst2), 5) + lst = lst2 + lst2 = [] + else: + log("Setting new list " + repr(lst2), 5) + test = range(len(lst)) + test.reverse() + for i in test: # Delete anything missing from the next list. 
+ if not lst[i] in lst2: + log("Purging mismatch " + str(len(lst)) + " - " + repr(lst[i]), 3) + del(lst[i]) + + if len(lst) == 0 and attrs == {}: + log("No list found, trying to match on name only", 3) + lst = re.compile('(<' + name + '>)', re.M | re.S).findall(item) + if len(lst) == 0: + lst = re.compile('(<' + name + ' .*?>)', re.M | re.S).findall(item) + + log("Done: " + str(type(lst)), 3) + return lst + +def parseDOM(html, name=u"", attrs={}, ret=False): + log("Name: " + repr(name) + " - Attrs:" + repr(attrs) + " - Ret: " + repr(ret) + " - HTML: " + str(type(html)), 3) + + if isinstance(name, str): # Should be handled + try: + name = name #.decode("utf-8") + except: + log("Couldn't decode name binary string: " + repr(name)) + + if isinstance(html, str): + try: + html = [html.decode("utf-8")] # Replace with chardet thingy + except: + log("Couldn't decode html binary string. Data length: " + repr(len(html))) + html = [html] + elif isinstance(html, unicode): + html = [html] + elif not isinstance(html, list): + log("Input isn't list or string/unicode.") + return u"" + + if not name.strip(): + log("Missing tag name") + return u"" + + ret_lst = [] + for item in html: + temp_item = re.compile('(<[^>]*?\n[^>]*?>)').findall(item) + for match in temp_item: + item = item.replace(match, match.replace("\n", " ")) + + lst = _getDOMElements(item, name, attrs) + + if isinstance(ret, str): + log("Getting attribute %s content for %s matches " % (ret, len(lst) ), 3) + lst2 = [] + for match in lst: + lst2 += _getDOMAttributes(match, name, ret) + lst = lst2 + else: + log("Getting element content for %s matches " % len(lst), 3) + lst2 = [] + for match in lst: + log("Getting element content for %s" % match, 4) + temp = _getDOMContent(item, name, match, ret).strip() + item = item[item.find(temp, item.find(match)) + len(temp):] + lst2.append(temp) + lst = lst2 + ret_lst += lst + + log("Done: " + repr(ret_lst), 3) + return ret_lst + + +def extractJS(data, function=False, variable=False, 
match=False, evaluate=False, values=False): + log("") + scripts = parseDOM(data, "script") + if len(scripts) == 0: + log("Couldn't find any script tags. Assuming javascript file was given.") + scripts = [data] + + lst = [] + log("Extracting", 4) + for script in scripts: + tmp_lst = [] + if function: + tmp_lst = re.compile(function + '\(.*?\).*?;', re.M | re.S).findall(script) + elif variable: + tmp_lst = re.compile(variable + '[ ]+=.*?;', re.M | re.S).findall(script) + else: + tmp_lst = [script] + if len(tmp_lst) > 0: + log("Found: " + repr(tmp_lst), 4) + lst += tmp_lst + else: + log("Found nothing on: " + script, 4) + + test = range(0, len(lst)) + test.reverse() + for i in test: + if match and lst[i].find(match) == -1: + log("Removing item: " + repr(lst[i]), 10) + del lst[i] + else: + log("Cleaning item: " + repr(lst[i]), 4) + if lst[i][0] == u"\n": + lst[i] == lst[i][1:] + if lst[i][len(lst) -1] == u"\n": + lst[i] == lst[i][:len(lst)- 2] + lst[i] = lst[i].strip() + + if values or evaluate: + for i in range(0, len(lst)): + log("Getting values %s" % lst[i]) + if function: + if evaluate: # include the ( ) for evaluation + data = re.compile("(\(.*?\))", re.M | re.S).findall(lst[i]) + else: + data = re.compile("\((.*?)\)", re.M | re.S).findall(lst[i]) + elif variable: + tlst = re.compile(variable +".*?=.*?;", re.M | re.S).findall(lst[i]) + data = [] + for tmp in tlst: # This breaks for some stuff. 
"ad_tag": "http://ad-emea.doubleclick.net/N4061/pfadx/com.ytpwatch.entertainment/main_563326'' # ends early, must end with } + cont_char = tmp[0] + cont_char = tmp[tmp.find("=") + 1:].strip() + cont_char = cont_char[0] + if cont_char in "'\"": + log("Using %s as quotation mark" % cont_char, 1) + tmp = tmp[tmp.find(cont_char) + 1:tmp.rfind(cont_char)] + else: + log("No quotation mark found", 1) + tmp = tmp[tmp.find("=") + 1: tmp.rfind(";")] + + tmp = tmp.strip() + if len(tmp) > 0: + data.append(tmp) + else: + log("ERROR: Don't know what to extract values from") + + log("Values extracted: %s" % repr(data)) + if len(data) > 0: + lst[i] = data[0] + + if evaluate: + for i in range(0, len(lst)): + log("Evaluating %s" % lst[i]) + data = lst[i].strip() + try: + try: + lst[i] = json.loads(data) + except: + log("Couldn't json.loads, trying eval") + lst[i] = eval(data) + except: + log("Couldn't eval: %s from %s" % (repr(data), repr(lst[i]))) + + log("Done: " + str(len(lst))) + return lst + +def fetchPage(params={}): + get = params.get + link = get("link") + ret_obj = {} + if get("post_data"): + log("called for : " + repr(params['link'])) + else: + log("called for : " + repr(params)) + + if not link or int(get("error", "0")) > 2: + log("giving up") + ret_obj["status"] = 500 + return ret_obj + + if get("post_data"): + if get("hide_post_data"): + log("Posting data", 2) + else: + log("Posting data: " + urllib.urlencode(get("post_data")), 2) + + request = urllib2.Request(link, urllib.urlencode(get("post_data"))) + request.add_header('Content-Type', 'application/x-www-form-urlencoded') + else: + log("Got request", 2) + request = urllib2.Request(link) + + if get("headers"): + for head in get("headers"): + request.add_header(head[0], head[1]) + + request.add_header('User-Agent', USERAGENT) + + if get("cookie"): + request.add_header('Cookie', get("cookie")) + + if get("refering"): + request.add_header('Referer', get("refering")) + + try: + log("connecting to server...", 1) + + con = 
urllib2.urlopen(request) + ret_obj["header"] = con.info() + ret_obj["new_url"] = con.geturl() + if get("no-content", "false") == u"false" or get("no-content", "false") == "false": + inputdata = con.read() + #data_type = chardet.detect(inputdata) + #inputdata = inputdata.decode(data_type["encoding"]) + ret_obj["content"] = inputdata.decode("utf-8") + + con.close() + + log("Done") + ret_obj["status"] = 200 + return ret_obj + + except urllib2.HTTPError, e: + err = str(e) + log("HTTPError : " + err) + log("HTTPError - Headers: " + str(e.headers) + " - Content: " + e.fp.read()) + + params["error"] = str(int(get("error", "0")) + 1) + ret = fetchPage(params) + + if not "content" in ret and e.fp: + ret["content"] = e.fp.read() + return ret + + ret_obj["status"] = 500 + return ret_obj + + except urllib2.URLError, e: + err = str(e) + log("URLError : " + err) + + time.sleep(3) + params["error"] = str(int(get("error", "0")) + 1) + ret_obj = fetchPage(params) + return ret_obj + + +def getCookieInfoAsHTML(): + log("", 5) + if hasattr(sys.modules["__main__"], "cookiejar"): + cookiejar = sys.modules["__main__"].cookiejar + + cookie = repr(cookiejar) + cookie = cookie.replace("<_LWPCookieJar.LWPCookieJar[", "") + cookie = cookie.replace("), Cookie(version=0,", ">", ">") + cookie = cookie.replace("Cookie(version=0,", "= 0x02050000: + # return data + + try: + return data.encode('ascii', "ignore") + except: + log("Hit except on : " + repr(data)) + s = u"" + for i in data: + try: + i.encode("ascii", "ignore") + except: + log("Can't convert character", 4) + continue + else: + s += i + + log(repr(s), 5) + return s + + +# This function handles stupid utf handling in python. 
+def makeUTF8(data): + log(repr(data), 5) + return data + try: + return data.decode('utf8', 'xmlcharrefreplace') # was 'ignore' + except: + log("Hit except on : " + repr(data)) + s = u"" + for i in data: + try: + i.decode("utf8", "xmlcharrefreplace") + except: + log("Can't convert character", 4) + continue + else: + s += i + log(repr(s), 5) + return s + + +def openFile(filepath, options=u"r"): + log(repr(filepath) + " - " + repr(options)) + if options.find("b") == -1: # Toggle binary mode on failure + alternate = options + u"b" + else: + alternate = options.replace(u"b", u"") + + try: + log("Trying normal: %s" % options) + return io.open(filepath, options) + except: + log("Fallback to binary: %s" % alternate) + return io.open(filepath, alternate) + + +def log(description, level=0): + if dbg and dbglevel > level: + try: + xbmc.log((u"[%s] %s : '%s'" % (plugin, inspect.stack()[1][3], description)).decode("utf-8"), xbmc.LOGNOTICE) + except: + xbmc.log(u"FALLBACK [%s] %s : '%s'" % (plugin, inspect.stack()[1][3], repr(description)), xbmc.LOGNOTICE) diff --git a/.install/.kodi/addons/script.module.parsedom/lib/CommonFunctions.pyo b/.install/.kodi/addons/script.module.parsedom/lib/CommonFunctions.pyo new file mode 100644 index 000000000..756b41a36 Binary files /dev/null and b/.install/.kodi/addons/script.module.parsedom/lib/CommonFunctions.pyo differ diff --git a/.install/.kodi/addons/script.module.pylast/LICENSE.md b/.install/.kodi/addons/script.module.pylast/LICENSE.md new file mode 100644 index 000000000..8dada3eda --- /dev/null +++ b/.install/.kodi/addons/script.module.pylast/LICENSE.md @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/.install/.kodi/addons/script.module.pylast/README.md b/.install/.kodi/addons/script.module.pylast/README.md new file mode 100644 index 000000000..d20a717df --- /dev/null +++ b/.install/.kodi/addons/script.module.pylast/README.md @@ -0,0 +1,103 @@ +pyLast +====== + +[![Build status](https://travis-ci.org/pylast/pylast.svg?branch=develop)](https://travis-ci.org/pylast/pylast) +[![PyPI version](https://img.shields.io/pypi/v/pylast.svg)](https://pypi.python.org/pypi/pylast/) +[![PyPI downloads](https://img.shields.io/pypi/dm/pylast.svg)](https://pypi.python.org/pypi/pylast/) +[![Coverage (Codecov)](https://codecov.io/gh/pylast/pylast/branch/develop/graph/badge.svg)](https://codecov.io/gh/pylast/pylast) +[![Coverage (Coveralls)](https://coveralls.io/repos/github/pylast/pylast/badge.svg?branch=develop)](https://coveralls.io/github/pylast/pylast?branch=develop) +[![Code health](https://landscape.io/github/pylast/pylast/develop/landscape.svg)](https://landscape.io/github/hugovk/pylast/develop) + + +A Python interface to [Last.fm](http://www.last.fm/) and other API-compatible websites such as [Libre.fm](http://libre.fm/). + +Try using the pydoc utility for help on usage or see [test_pylast.py](tests/test_pylast.py) for examples. + +Installation +------------ + +Install via pip: + + pip install pylast + + +Features +-------- + + * Simple public interface. + * Access to all the data exposed by the Last.fm web services. + * Scrobbling support. + * Full object-oriented design. + * Proxy support. + * Internal caching support for some web services calls (disabled by default). + * Support for other API-compatible networks like Libre.fm. + * Python 3-friendly (Starting from 0.5). + + +Getting Started +--------------- + +Here's some simple code example to get you started. In order to create any object from pyLast, you need a `Network` object which represents a social music network that is Last.fm or any other API-compatible one. 
You can obtain a pre-configured one for Last.fm and use it as follows: + +```python +import pylast + +# You have to have your own unique two values for API_KEY and API_SECRET +# Obtain yours from http://www.last.fm/api/account/create for Last.fm +API_KEY = "b25b959554ed76058ac220b7b2e0a026" # this is a sample key +API_SECRET = "425b55975eed76058ac220b7b4e8a054" + +# In order to perform a write operation you need to authenticate yourself +username = "your_user_name" +password_hash = pylast.md5("your_password") + +network = pylast.LastFMNetwork(api_key=API_KEY, api_secret=API_SECRET, + username=username, password_hash=password_hash) + +# Now you can use that object everywhere +artist = network.get_artist("System of a Down") +artist.shout("<3") + + +track = network.get_track("Iron Maiden", "The Nomad") +track.love() +track.add_tags(("awesome", "favorite")) + +# Type help(pylast.LastFMNetwork) or help(pylast) in a Python interpreter +# to get more help about anything and see examples of how it works +``` + +More examples in hugovk/lastfm-tools and [test_pylast.py](test_pylast.py). + +Testing +------- + +[tests/test_pylast.py](tests/test_pylast.py) contains integration tests with Last.fm, and plenty of code examples. Unit tests are also in the [tests/](tests/) directory. + +For integration tests you need a test account at Last.fm that will become cluttered with test data, and an API key and secret. 
Either copy [example_test_pylast.yaml](example_test_pylast.yaml) to test_pylast.yaml and fill out the credentials, or set them as environment variables like: + +```sh +export PYLAST_USERNAME=TODO_ENTER_YOURS_HERE +export PYLAST_PASSWORD_HASH=TODO_ENTER_YOURS_HERE +export PYLAST_API_KEY=TODO_ENTER_YOURS_HERE +export PYLAST_API_SECRET=TODO_ENTER_YOURS_HERE +``` + +To run all unit and integration tests: +```sh +pip install pytest flaky mock +py.test +``` + +Or run just one test case: +```sh +py.test -k test_scrobble +``` + +To run with coverage: +```sh +py.test -v --cov pylast --cov-report term-missing +coverage report # for command-line report +coverage html # for HTML report +open htmlcov/index.html +``` diff --git a/.install/.kodi/addons/script.module.pylast/addon.xml b/.install/.kodi/addons/script.module.pylast/addon.xml new file mode 100644 index 000000000..24885c63f --- /dev/null +++ b/.install/.kodi/addons/script.module.pylast/addon.xml @@ -0,0 +1,21 @@ + + + + + + + + + A Python interface to Last.fm and Libre.fm + A Python interface to Last.fm and Libre.fm + + https://pypi.python.org/pypi/pylast + Apache2 + all + https://github.com/pylast/pylast + + + icon.png + + + diff --git a/.install/.kodi/addons/script.module.pylast/icon.png b/.install/.kodi/addons/script.module.pylast/icon.png new file mode 100644 index 000000000..ca4c53adb Binary files /dev/null and b/.install/.kodi/addons/script.module.pylast/icon.png differ diff --git a/.install/.kodi/addons/script.module.pylast/lib/pylast/__init__.py b/.install/.kodi/addons/script.module.pylast/lib/pylast/__init__.py new file mode 100644 index 000000000..e533fde29 --- /dev/null +++ b/.install/.kodi/addons/script.module.pylast/lib/pylast/__init__.py @@ -0,0 +1,4614 @@ +# -*- coding: utf-8 -*- +# +# pylast - +# A Python interface to Last.fm and Libre.fm +# +# Copyright 2008-2010 Amr Hassan +# Copyright 2013-2017 hugovk +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# https://github.com/pylast/pylast + +import hashlib +from xml.dom import minidom, Node +import xml.dom +import time +import shelve +import tempfile +import sys +import collections +import warnings +import re +import six + +__version__ = '1.8.0' +__author__ = 'Amr Hassan, hugovk' +__copyright__ = "Copyright (C) 2008-2010 Amr Hassan, 2013-2017 hugovk" +__license__ = "apache2" +__email__ = 'amr.hassan@gmail.com' + + +def _deprecation_warning(message): + warnings.warn(message, DeprecationWarning) + + +def _can_use_ssl_securely(): + # Python 3.3 doesn't support create_default_context() but can be made to + # work sanely. + # <2.7.9 and <3.2 never did any SSL verification so don't do SSL there. + # >3.4 and >2.7.9 has sane defaults so use SSL there. 
+ v = sys.version_info + return v > (3, 3) or ((2, 7, 9) < v < (3, 0)) + + +if _can_use_ssl_securely(): + import ssl + +if sys.version_info[0] == 3: + if _can_use_ssl_securely(): + from http.client import HTTPSConnection + else: + from http.client import HTTPConnection + import html.entities as htmlentitydefs + from urllib.parse import splithost as url_split_host + from urllib.parse import quote_plus as url_quote_plus + + unichr = chr + +elif sys.version_info[0] == 2: + if _can_use_ssl_securely(): + from httplib import HTTPSConnection + else: + from httplib import HTTPConnection + import htmlentitydefs + from urllib import splithost as url_split_host + from urllib import quote_plus as url_quote_plus + +STATUS_INVALID_SERVICE = 2 +STATUS_INVALID_METHOD = 3 +STATUS_AUTH_FAILED = 4 +STATUS_INVALID_FORMAT = 5 +STATUS_INVALID_PARAMS = 6 +STATUS_INVALID_RESOURCE = 7 +STATUS_TOKEN_ERROR = 8 +STATUS_INVALID_SK = 9 +STATUS_INVALID_API_KEY = 10 +STATUS_OFFLINE = 11 +STATUS_SUBSCRIBERS_ONLY = 12 +STATUS_INVALID_SIGNATURE = 13 +STATUS_TOKEN_UNAUTHORIZED = 14 +STATUS_TOKEN_EXPIRED = 15 + +EVENT_ATTENDING = '0' +EVENT_MAYBE_ATTENDING = '1' +EVENT_NOT_ATTENDING = '2' + +PERIOD_OVERALL = 'overall' +PERIOD_7DAYS = '7day' +PERIOD_1MONTH = '1month' +PERIOD_3MONTHS = '3month' +PERIOD_6MONTHS = '6month' +PERIOD_12MONTHS = '12month' + +DOMAIN_ENGLISH = 0 +DOMAIN_GERMAN = 1 +DOMAIN_SPANISH = 2 +DOMAIN_FRENCH = 3 +DOMAIN_ITALIAN = 4 +DOMAIN_POLISH = 5 +DOMAIN_PORTUGUESE = 6 +DOMAIN_SWEDISH = 7 +DOMAIN_TURKISH = 8 +DOMAIN_RUSSIAN = 9 +DOMAIN_JAPANESE = 10 +DOMAIN_CHINESE = 11 + +COVER_SMALL = 0 +COVER_MEDIUM = 1 +COVER_LARGE = 2 +COVER_EXTRA_LARGE = 3 +COVER_MEGA = 4 + +IMAGES_ORDER_POPULARITY = "popularity" +IMAGES_ORDER_DATE = "dateadded" + + +USER_MALE = 'Male' +USER_FEMALE = 'Female' + +SCROBBLE_SOURCE_USER = "P" +SCROBBLE_SOURCE_NON_PERSONALIZED_BROADCAST = "R" +SCROBBLE_SOURCE_PERSONALIZED_BROADCAST = "E" +SCROBBLE_SOURCE_LASTFM = "L" +SCROBBLE_SOURCE_UNKNOWN = "U" + 
+SCROBBLE_MODE_PLAYED = "" +SCROBBLE_MODE_LOVED = "L" +SCROBBLE_MODE_BANNED = "B" +SCROBBLE_MODE_SKIPPED = "S" + +# From http://boodebr.org/main/python/all-about-python-and-unicode#UNI_XML +RE_XML_ILLEGAL = (u'([\u0000-\u0008\u000b-\u000c\u000e-\u001f\ufffe-\uffff])' + + u'|' + + u'([%s-%s][^%s-%s])|([^%s-%s][%s-%s])|([%s-%s]$)|(^[%s-%s])' + % + (unichr(0xd800), unichr(0xdbff), unichr(0xdc00), + unichr(0xdfff), unichr(0xd800), unichr(0xdbff), + unichr(0xdc00), unichr(0xdfff), unichr(0xd800), + unichr(0xdbff), unichr(0xdc00), unichr(0xdfff))) + +XML_ILLEGAL = re.compile(RE_XML_ILLEGAL) + +# Python <=3.3 doesn't support create_default_context() +# <2.7.9 and <3.2 never did any SSL verification +# FIXME This can be removed after 2017-09 when 3.3 is no longer supported and +# pypy3 uses 3.4 or later, see +# https://en.wikipedia.org/wiki/CPython#Version_history +if sys.version_info[0] == 3 and sys.version_info[1] == 3: + import certifi + SSL_CONTEXT = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + SSL_CONTEXT.verify_mode = ssl.CERT_REQUIRED + SSL_CONTEXT.options |= ssl.OP_NO_COMPRESSION + # Intermediate from https://wiki.mozilla.org/Security/Server_Side_TLS + # Create the cipher string + cipher_string = """ + ECDHE-ECDSA-CHACHA20-POLY1305 + ECDHE-RSA-CHACHA20-POLY1305 + ECDHE-ECDSA-AES128-GCM-SHA256 + ECDHE-RSA-AES128-GCM-SHA256 + ECDHE-ECDSA-AES256-GCM-SHA384 + ECDHE-RSA-AES256-GCM-SHA384 + DHE-RSA-AES128-GCM-SHA256 + DHE-RSA-AES256-GCM-SHA384 + ECDHE-ECDSA-AES128-SHA256 + ECDHE-RSA-AES128-SHA256 + ECDHE-ECDSA-AES128-SHA + ECDHE-RSA-AES256-SHA384 + ECDHE-RSA-AES128-SHA + ECDHE-ECDSA-AES256-SHA384 + ECDHE-ECDSA-AES256-SHA + ECDHE-RSA-AES256-SHA + DHE-RSA-AES128-SHA256 + DHE-RSA-AES128-SHA + DHE-RSA-AES256-SHA256 + DHE-RSA-AES256-SHA + ECDHE-ECDSA-DES-CBC3-SHA + ECDHE-RSA-DES-CBC3-SHA + EDH-RSA-DES-CBC3-SHA + AES128-GCM-SHA256 + AES256-GCM-SHA384 + AES128-SHA256 + AES256-SHA256 + AES128-SHA + AES256-SHA + DES-CBC3-SHA + !DSS + """ + cipher_string = ' '.join(cipher_string.split()) 
+ SSL_CONTEXT.set_ciphers(cipher_string) + SSL_CONTEXT.load_verify_locations(certifi.where()) + +# Python >3.4 and >2.7.9 has sane defaults +elif sys.version_info > (3, 4) or ((2, 7, 9) < sys.version_info < (3, 0)): + SSL_CONTEXT = ssl.create_default_context() + + +class _Network(object): + """ + A music social network website such as Last.fm or + one with a Last.fm-compatible API. + """ + + def __init__( + self, name, homepage, ws_server, api_key, api_secret, session_key, + submission_server, username, password_hash, domain_names, urls, + token=None): + """ + name: the name of the network + homepage: the homepage URL + ws_server: the URL of the webservices server + api_key: a provided API_KEY + api_secret: a provided API_SECRET + session_key: a generated session_key or None + submission_server: the URL of the server to which tracks are + submitted (scrobbled) + username: a username of a valid user + password_hash: the output of pylast.md5(password) where password is + the user's password + domain_names: a dict mapping each DOMAIN_* value to a string domain + name + urls: a dict mapping types to URLs + token: an authentication token to retrieve a session + + if username and password_hash were provided and not session_key, + session_key will be generated automatically when needed. + + Either a valid session_key or a combination of username and + password_hash must be present for scrobbling. + + You should use a preconfigured network object through a + get_*_network(...) method instead of creating an object + of this class, unless you know what you're doing. 
+ """ + + self.name = name + self.homepage = homepage + self.ws_server = ws_server + self.api_key = api_key + self.api_secret = api_secret + self.session_key = session_key + self.submission_server = submission_server + self.username = username + self.password_hash = password_hash + self.domain_names = domain_names + self.urls = urls + + self.cache_backend = None + self.proxy_enabled = False + self.proxy = None + self.last_call_time = 0 + self.limit_rate = False + + # Load session_key from authentication token if provided + if token and not self.session_key: + sk_gen = SessionKeyGenerator(self) + self.session_key = sk_gen.get_web_auth_session_key( + url=None, token=token) + + # Generate a session_key if necessary + if ((self.api_key and self.api_secret) and not self.session_key and + (self.username and self.password_hash)): + sk_gen = SessionKeyGenerator(self) + self.session_key = sk_gen.get_session_key( + self.username, self.password_hash) + + def __str__(self): + return "%s Network" % self.name + + def get_artist(self, artist_name): + """ + Return an Artist object + """ + + return Artist(artist_name, self) + + def get_track(self, artist, title): + """ + Return a Track object + """ + + return Track(artist, title, self) + + def get_album(self, artist, title): + """ + Return an Album object + """ + + return Album(artist, title, self) + + def get_authenticated_user(self): + """ + Returns the authenticated user + """ + + return AuthenticatedUser(self) + + def get_country(self, country_name): + """ + Returns a country object + """ + + return Country(country_name, self) + + def get_metro(self, metro_name, country_name): + """ + Returns a metro object + """ + + return Metro(metro_name, country_name, self) + + def get_group(self, name): + """ + Returns a Group object + """ + + return Group(name, self) + + def get_user(self, username): + """ + Returns a user object + """ + + return User(username, self) + + def get_tag(self, name): + """ + Returns a tag object + """ + + 
return Tag(name, self) + + def get_scrobbler(self, client_id, client_version): + """ + Returns a Scrobbler object used for submitting tracks to the server + + Quote from http://www.last.fm/api/submissions: + ======== + Client identifiers are used to provide a centrally managed database + of the client versions, allowing clients to be banned if they are + found to be behaving undesirably. The client ID is associated with + a version number on the server, however these are only incremented + if a client is banned and do not have to reflect the version of the + actual client application. + + During development, clients which have not been allocated an + identifier should use the identifier tst, with a version number of + 1.0. Do not distribute code or client implementations which use + this test identifier. Do not use the identifiers used by other + clients. + ========= + + To obtain a new client identifier please contact: + * Last.fm: submissions@last.fm + * # TODO: list others + + ...and provide us with the name of your client and its homepage + address. + """ + + _deprecation_warning( + "Use _Network.scrobble(...), _Network.scrobble_many(...)," + " and Network.update_now_playing(...) instead") + + return Scrobbler(self, client_id, client_version) + + def _get_language_domain(self, domain_language): + """ + Returns the mapped domain name of the network to a DOMAIN_* value + """ + + if domain_language in self.domain_names: + return self.domain_names[domain_language] + + def _get_url(self, domain, url_type): + return "http://%s/%s" % ( + self._get_language_domain(domain), self.urls[url_type]) + + def _get_ws_auth(self): + """ + Returns an (API_KEY, API_SECRET, SESSION_KEY) tuple. + """ + return (self.api_key, self.api_secret, self.session_key) + + def _delay_call(self): + """ + Makes sure that web service calls are at least 0.2 seconds apart. 
+ """ + + # Delay time in seconds from section 4.4 of http://www.last.fm/api/tos + DELAY_TIME = 0.2 + now = time.time() + + time_since_last = now - self.last_call_time + + if time_since_last < DELAY_TIME: + time.sleep(DELAY_TIME - time_since_last) + + self.last_call_time = now + + def create_new_playlist(self, title, description): + """ + Creates a playlist for the authenticated user and returns it + title: The title of the new playlist. + description: The description of the new playlist. + """ + + params = {} + params['title'] = title + params['description'] = description + + doc = _Request(self, 'playlist.create', params).execute(False) + + e_id = doc.getElementsByTagName("id")[0].firstChild.data + user = doc.getElementsByTagName('playlists')[0].getAttribute('user') + + return Playlist(user, e_id, self) + + def get_top_artists(self, limit=None, cacheable=True): + """Returns the most played artists as a sequence of TopItem objects.""" + + params = {} + if limit: + params["limit"] = limit + + doc = _Request(self, "chart.getTopArtists", params).execute(cacheable) + + return _extract_top_artists(doc, self) + + def get_top_tracks(self, limit=None, cacheable=True): + """Returns the most played tracks as a sequence of TopItem objects.""" + + params = {} + if limit: + params["limit"] = limit + + doc = _Request(self, "chart.getTopTracks", params).execute(cacheable) + + seq = [] + for node in doc.getElementsByTagName("track"): + title = _extract(node, "name") + artist = _extract(node, "name", 1) + track = Track(artist, title, self) + weight = _number(_extract(node, "playcount")) + seq.append(TopItem(track, weight)) + + return seq + + def get_top_tags(self, limit=None, cacheable=True): + """Returns the most used tags as a sequence of TopItem objects.""" + + # Last.fm has no "limit" parameter for tag.getTopTags + # so we need to get all (250) and then limit locally + doc = _Request(self, "tag.getTopTags").execute(cacheable) + + seq = [] + for node in 
doc.getElementsByTagName("tag"): + if limit and len(seq) >= limit: + break + tag = Tag(_extract(node, "name"), self) + weight = _number(_extract(node, "count")) + seq.append(TopItem(tag, weight)) + + return seq + + def get_geo_events( + self, longitude=None, latitude=None, location=None, distance=None, + tag=None, festivalsonly=None, limit=None, cacheable=True): + """ + Returns all events in a specific location by country or city name. + Parameters: + longitude (Optional) : Specifies a longitude value to retrieve events + for (service returns nearby events by default) + latitude (Optional) : Specifies a latitude value to retrieve events for + (service returns nearby events by default) + location (Optional) : Specifies a location to retrieve events for + (service returns nearby events by default) + distance (Optional) : Find events within a specified radius + (in kilometres) + tag (Optional) : Specifies a tag to filter by. + festivalsonly[0|1] (Optional) : Whether only festivals should be + returned, or all events. + limit (Optional) : The number of results to fetch per page. + Defaults to 10. + """ + + params = {} + + if longitude: + params["long"] = longitude + if latitude: + params["lat"] = latitude + if location: + params["location"] = location + if limit: + params["limit"] = limit + if distance: + params["distance"] = distance + if tag: + params["tag"] = tag + if festivalsonly: + params["festivalsonly"] = 1 + elif not festivalsonly: + params["festivalsonly"] = 0 + + doc = _Request(self, "geo.getEvents", params).execute(cacheable) + + return _extract_events_from_doc(doc, self) + + def get_metro_weekly_chart_dates(self, cacheable=True): + """ + Returns a list of From and To tuples for the available metro charts. 
+ """ + + doc = _Request(self, "geo.getMetroWeeklyChartlist").execute(cacheable) + + seq = [] + for node in doc.getElementsByTagName("chart"): + seq.append((node.getAttribute("from"), node.getAttribute("to"))) + + return seq + + def get_metros(self, country=None, cacheable=True): + """ + Get a list of valid countries and metros for use in the other + webservices. + Parameters: + country (Optional) : Optionally restrict the results to those Metros + from a particular country, as defined by the ISO 3166-1 country + names standard. + """ + params = {} + + if country: + params["country"] = country + + doc = _Request(self, "geo.getMetros", params).execute(cacheable) + + metros = doc.getElementsByTagName("metro") + seq = [] + + for metro in metros: + name = _extract(metro, "name") + country = _extract(metro, "country") + + seq.append(Metro(name, country, self)) + + return seq + + def get_geo_top_artists(self, country, limit=None, cacheable=True): + """Get the most popular artists on Last.fm by country. + Parameters: + country (Required) : A country name, as defined by the ISO 3166-1 + country names standard. + limit (Optional) : The number of results to fetch per page. + Defaults to 50. + """ + params = {"country": country} + + if limit: + params["limit"] = limit + + doc = _Request(self, "geo.getTopArtists", params).execute(cacheable) + + return _extract_top_artists(doc, self) + + def get_geo_top_tracks( + self, country, location=None, limit=None, cacheable=True): + """Get the most popular tracks on Last.fm last week by country. + Parameters: + country (Required) : A country name, as defined by the ISO 3166-1 + country names standard + location (Optional) : A metro name, to fetch the charts for + (must be within the country specified) + limit (Optional) : The number of results to fetch per page. + Defaults to 50. 
+ """ + params = {"country": country} + + if location: + params["location"] = location + if limit: + params["limit"] = limit + + doc = _Request(self, "geo.getTopTracks", params).execute(cacheable) + + tracks = doc.getElementsByTagName("track") + seq = [] + + for track in tracks: + title = _extract(track, "name") + artist = _extract(track, "name", 1) + listeners = _extract(track, "listeners") + + seq.append(TopItem(Track(artist, title, self), listeners)) + + return seq + + def enable_proxy(self, host, port): + """Enable a default web proxy""" + + self.proxy = [host, _number(port)] + self.proxy_enabled = True + + def disable_proxy(self): + """Disable using the web proxy""" + + self.proxy_enabled = False + + def is_proxy_enabled(self): + """Returns True if a web proxy is enabled.""" + + return self.proxy_enabled + + def _get_proxy(self): + """Returns proxy details.""" + + return self.proxy + + def enable_rate_limit(self): + """Enables rate limiting for this network""" + self.limit_rate = True + + def disable_rate_limit(self): + """Disables rate limiting for this network""" + self.limit_rate = False + + def is_rate_limited(self): + """Return True if web service calls are rate limited""" + return self.limit_rate + + def enable_caching(self, file_path=None): + """Enables caching request-wide for all cacheable calls. + + * file_path: A file path for the backend storage file. If + None set, a temp file would probably be created, according the backend. + """ + + if not file_path: + file_path = tempfile.mktemp(prefix="pylast_tmp_") + + self.cache_backend = _ShelfCacheBackend(file_path) + + def disable_caching(self): + """Disables all caching features.""" + + self.cache_backend = None + + def is_caching_enabled(self): + """Returns True if caching is enabled.""" + + return not (self.cache_backend is None) + + def _get_cache_backend(self): + + return self.cache_backend + + def search_for_album(self, album_name): + """Searches for an album by its name. 
Returns a AlbumSearch object. + Use get_next_page() to retrieve sequences of results.""" + + return AlbumSearch(album_name, self) + + def search_for_artist(self, artist_name): + """Searches of an artist by its name. Returns a ArtistSearch object. + Use get_next_page() to retrieve sequences of results.""" + + return ArtistSearch(artist_name, self) + + def search_for_tag(self, tag_name): + """Searches of a tag by its name. Returns a TagSearch object. + Use get_next_page() to retrieve sequences of results.""" + + return TagSearch(tag_name, self) + + def search_for_track(self, artist_name, track_name): + """Searches of a track by its name and its artist. Set artist to an + empty string if not available. + Returns a TrackSearch object. + Use get_next_page() to retrieve sequences of results.""" + + return TrackSearch(artist_name, track_name, self) + + def search_for_venue(self, venue_name, country_name): + """Searches of a venue by its name and its country. Set country_name to + an empty string if not available. + Returns a VenueSearch object. 
+ Use get_next_page() to retrieve sequences of results.""" + + return VenueSearch(venue_name, country_name, self) + + def get_track_by_mbid(self, mbid): + """Looks up a track by its MusicBrainz ID""" + + params = {"mbid": mbid} + + doc = _Request(self, "track.getInfo", params).execute(True) + + return Track(_extract(doc, "name", 1), _extract(doc, "name"), self) + + def get_artist_by_mbid(self, mbid): + """Loooks up an artist by its MusicBrainz ID""" + + params = {"mbid": mbid} + + doc = _Request(self, "artist.getInfo", params).execute(True) + + return Artist(_extract(doc, "name"), self) + + def get_album_by_mbid(self, mbid): + """Looks up an album by its MusicBrainz ID""" + + params = {"mbid": mbid} + + doc = _Request(self, "album.getInfo", params).execute(True) + + return Album(_extract(doc, "artist"), _extract(doc, "name"), self) + + def update_now_playing( + self, artist, title, album=None, album_artist=None, + duration=None, track_number=None, mbid=None, context=None): + """ + Used to notify Last.fm that a user has started listening to a track. + + Parameters: + artist (Required) : The artist name + title (Required) : The track title + album (Optional) : The album name. + album_artist (Optional) : The album artist - if this differs + from the track artist. + duration (Optional) : The length of the track in seconds. + track_number (Optional) : The track number of the track on the + album. + mbid (Optional) : The MusicBrainz Track ID. 
+ context (Optional) : Sub-client version + (not public, only enabled for certain API keys) + """ + + params = {"track": title, "artist": artist} + + if album: + params["album"] = album + if album_artist: + params["albumArtist"] = album_artist + if context: + params["context"] = context + if track_number: + params["trackNumber"] = track_number + if mbid: + params["mbid"] = mbid + if duration: + params["duration"] = duration + + _Request(self, "track.updateNowPlaying", params).execute() + + def scrobble( + self, artist, title, timestamp, album=None, album_artist=None, + track_number=None, duration=None, stream_id=None, context=None, + mbid=None): + + """Used to add a track-play to a user's profile. + + Parameters: + artist (Required) : The artist name. + title (Required) : The track name. + timestamp (Required) : The time the track started playing, in UNIX + timestamp format (integer number of seconds since 00:00:00, + January 1st 1970 UTC). This must be in the UTC time zone. + album (Optional) : The album name. + album_artist (Optional) : The album artist - if this differs from + the track artist. + context (Optional) : Sub-client version (not public, only enabled + for certain API keys) + stream_id (Optional) : The stream id for this track received from + the radio.getPlaylist service. + track_number (Optional) : The track number of the track on the + album. + mbid (Optional) : The MusicBrainz Track ID. + duration (Optional) : The length of the track in seconds. + """ + + return self.scrobble_many(({ + "artist": artist, "title": title, "timestamp": timestamp, + "album": album, "album_artist": album_artist, + "track_number": track_number, "duration": duration, + "stream_id": stream_id, "context": context, "mbid": mbid},)) + + def scrobble_many(self, tracks): + """ + Used to scrobble a batch of tracks at once. The parameter tracks is a + sequence of dicts per track containing the keyword arguments as if + passed to the scrobble() method. 
+ """ + + tracks_to_scrobble = tracks[:50] + if len(tracks) > 50: + remaining_tracks = tracks[50:] + else: + remaining_tracks = None + + params = {} + for i in range(len(tracks_to_scrobble)): + + params["artist[%d]" % i] = tracks_to_scrobble[i]["artist"] + params["track[%d]" % i] = tracks_to_scrobble[i]["title"] + + additional_args = ( + "timestamp", "album", "album_artist", "context", + "stream_id", "track_number", "mbid", "duration") + args_map_to = { # so friggin lazy + "album_artist": "albumArtist", + "track_number": "trackNumber", + "stream_id": "streamID"} + + for arg in additional_args: + + if arg in tracks_to_scrobble[i] and tracks_to_scrobble[i][arg]: + if arg in args_map_to: + maps_to = args_map_to[arg] + else: + maps_to = arg + + params[ + "%s[%d]" % (maps_to, i)] = tracks_to_scrobble[i][arg] + + _Request(self, "track.scrobble", params).execute() + + if remaining_tracks: + self.scrobble_many(remaining_tracks) + + def get_play_links(self, link_type, things, cacheable=True): + method = link_type + ".getPlaylinks" + params = {} + + for i, thing in enumerate(things): + if link_type == "artist": + params['artist[' + str(i) + ']'] = thing + elif link_type == "album": + params['artist[' + str(i) + ']'] = thing.artist + params['album[' + str(i) + ']'] = thing.title + elif link_type == "track": + params['artist[' + str(i) + ']'] = thing.artist + params['track[' + str(i) + ']'] = thing.title + + doc = _Request(self, method, params).execute(cacheable) + + seq = [] + + for node in doc.getElementsByTagName("externalids"): + spotify = _extract(node, "spotify") + seq.append(spotify) + + return seq + + def get_artist_play_links(self, artists, cacheable=True): + return self.get_play_links("artist", artists, cacheable) + + def get_album_play_links(self, albums, cacheable=True): + return self.get_play_links("album", albums, cacheable) + + def get_track_play_links(self, tracks, cacheable=True): + return self.get_play_links("track", tracks, cacheable) + + +class 
LastFMNetwork(_Network): + + """A Last.fm network object + + api_key: a provided API_KEY + api_secret: a provided API_SECRET + session_key: a generated session_key or None + username: a username of a valid user + password_hash: the output of pylast.md5(password) where password is the + user's password + + if username and password_hash were provided and not session_key, + session_key will be generated automatically when needed. + + Either a valid session_key or a combination of username and password_hash + must be present for scrobbling. + + Most read-only webservices only require an api_key and an api_secret, see + about obtaining them from: + http://www.last.fm/api/account + """ + + def __init__( + self, api_key="", api_secret="", session_key="", username="", + password_hash="", token=""): + _Network.__init__( + self, + name="Last.fm", + homepage="http://last.fm", + ws_server=("ws.audioscrobbler.com", "/2.0/"), + api_key=api_key, + api_secret=api_secret, + session_key=session_key, + submission_server="http://post.audioscrobbler.com:80/", + username=username, + password_hash=password_hash, + token=token, + domain_names={ + DOMAIN_ENGLISH: 'www.last.fm', + DOMAIN_GERMAN: 'www.lastfm.de', + DOMAIN_SPANISH: 'www.lastfm.es', + DOMAIN_FRENCH: 'www.lastfm.fr', + DOMAIN_ITALIAN: 'www.lastfm.it', + DOMAIN_POLISH: 'www.lastfm.pl', + DOMAIN_PORTUGUESE: 'www.lastfm.com.br', + DOMAIN_SWEDISH: 'www.lastfm.se', + DOMAIN_TURKISH: 'www.lastfm.com.tr', + DOMAIN_RUSSIAN: 'www.lastfm.ru', + DOMAIN_JAPANESE: 'www.lastfm.jp', + DOMAIN_CHINESE: 'cn.last.fm', + }, + urls={ + "album": "music/%(artist)s/%(album)s", + "artist": "music/%(artist)s", + "event": "event/%(id)s", + "country": "place/%(country_name)s", + "playlist": "user/%(user)s/library/playlists/%(appendix)s", + "tag": "tag/%(name)s", + "track": "music/%(artist)s/_/%(title)s", + "group": "group/%(name)s", + "user": "user/%(name)s", + } + ) + + def __repr__(self): + return "pylast.LastFMNetwork(%s)" % (", ".join( + ("'%s'" % 
def get_lastfm_network(
        api_key="", api_secret="", session_key="", username="",
        password_hash="", token=""):
    """
    Deprecated: create a LastFMNetwork object instead.

    Returns a preconfigured network object for Last.fm.

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password
    token: an authentication token to retrieve a session

    If username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.

    Either a valid session_key, a combination of username and password_hash,
    or token must be present for scrobbling. Most read-only webservices only
    require an api_key and an api_secret; see http://www.last.fm/api/account
    """
    _deprecation_warning("Create a LastFMNetwork object instead")

    return LastFMNetwork(
        api_key, api_secret, session_key, username, password_hash, token)
+ """ + + def __init__( + self, api_key="", api_secret="", session_key="", username="", + password_hash=""): + + _Network.__init__( + self, + name="Libre.fm", + homepage="http://libre.fm", + ws_server=("libre.fm", "/2.0/"), + api_key=api_key, + api_secret=api_secret, + session_key=session_key, + submission_server="http://turtle.libre.fm:80/", + username=username, + password_hash=password_hash, + domain_names={ + DOMAIN_ENGLISH: "libre.fm", + DOMAIN_GERMAN: "libre.fm", + DOMAIN_SPANISH: "libre.fm", + DOMAIN_FRENCH: "libre.fm", + DOMAIN_ITALIAN: "libre.fm", + DOMAIN_POLISH: "libre.fm", + DOMAIN_PORTUGUESE: "libre.fm", + DOMAIN_SWEDISH: "libre.fm", + DOMAIN_TURKISH: "libre.fm", + DOMAIN_RUSSIAN: "libre.fm", + DOMAIN_JAPANESE: "libre.fm", + DOMAIN_CHINESE: "libre.fm", + }, + urls={ + "album": "artist/%(artist)s/album/%(album)s", + "artist": "artist/%(artist)s", + "event": "event/%(id)s", + "country": "place/%(country_name)s", + "playlist": "user/%(user)s/library/playlists/%(appendix)s", + "tag": "tag/%(name)s", + "track": "music/%(artist)s/_/%(title)s", + "group": "group/%(name)s", + "user": "user/%(name)s", + } + ) + + def __repr__(self): + return "pylast.LibreFMNetwork(%s)" % (", ".join( + ("'%s'" % self.api_key, + "'%s'" % self.api_secret, + "'%s'" % self.session_key, + "'%s'" % self.username, + "'%s'" % self.password_hash))) + + +def get_librefm_network( + api_key="", api_secret="", session_key="", username="", + password_hash=""): + """ + Returns a preconfigured _Network object for Libre.fm + + api_key: a provided API_KEY + api_secret: a provided API_SECRET + session_key: a generated session_key or None + username: a username of a valid user + password_hash: the output of pylast.md5(password) where password is the + user's password + + if username and password_hash were provided and not session_key, + session_key will be generated automatically when needed. 
+ """ + + _deprecation_warning( + "DeprecationWarning: Create a LibreFMNetwork object instead") + + return LibreFMNetwork( + api_key, api_secret, session_key, username, password_hash) + + +class _ShelfCacheBackend(object): + """Used as a backend for caching cacheable requests.""" + def __init__(self, file_path=None): + self.shelf = shelve.open(file_path) + + def __iter__(self): + return iter(self.shelf.keys()) + + def get_xml(self, key): + return self.shelf[key] + + def set_xml(self, key, xml_string): + self.shelf[key] = xml_string + + +class _Request(object): + """Representing an abstract web service operation.""" + + def __init__(self, network, method_name, params={}): + + self.network = network + self.params = {} + + for key in params: + self.params[key] = _unicode(params[key]) + + (self.api_key, self.api_secret, self.session_key) = \ + network._get_ws_auth() + + self.params["api_key"] = self.api_key + self.params["method"] = method_name + + if network.is_caching_enabled(): + self.cache = network._get_cache_backend() + + if self.session_key: + self.params["sk"] = self.session_key + self.sign_it() + + def sign_it(self): + """Sign this request.""" + + if "api_sig" not in self.params.keys(): + self.params['api_sig'] = self._get_signature() + + def _get_signature(self): + """ + Returns a 32-character hexadecimal md5 hash of the signature string. + """ + + keys = list(self.params.keys()) + + keys.sort() + + string = "" + + for name in keys: + string += name + string += self.params[name] + + string += self.api_secret + + return md5(string) + + def _get_cache_key(self): + """ + The cache key is a string of concatenated sorted names and values. 
+ """ + + keys = list(self.params.keys()) + keys.sort() + + cache_key = str() + + for key in keys: + if key != "api_sig" and key != "api_key" and key != "sk": + cache_key += key + self.params[key] + + return hashlib.sha1(cache_key.encode("utf-8")).hexdigest() + + def _get_cached_response(self): + """Returns a file object of the cached response.""" + + if not self._is_cached(): + response = self._download_response() + self.cache.set_xml(self._get_cache_key(), response) + + return self.cache.get_xml(self._get_cache_key()) + + def _is_cached(self): + """Returns True if the request is already in cache.""" + + return self._get_cache_key() in self.cache + + def _download_response(self): + """Returns a response body string from the server.""" + + if self.network.limit_rate: + self.network._delay_call() + + data = [] + for name in self.params.keys(): + data.append('='.join(( + name, url_quote_plus(_string(self.params[name]))))) + data = '&'.join(data) + + headers = { + "Content-type": "application/x-www-form-urlencoded", + 'Accept-Charset': 'utf-8', + 'User-Agent': "pylast" + '/' + __version__ + } + + (HOST_NAME, HOST_SUBDIR) = self.network.ws_server + + if self.network.is_proxy_enabled(): + if _can_use_ssl_securely(): + conn = HTTPSConnection( + context=SSL_CONTEXT, + host=self.network._get_proxy()[0], + port=self.network._get_proxy()[1]) + else: + conn = HTTPConnection( + host=self.network._get_proxy()[0], + port=self.network._get_proxy()[1]) + + try: + conn.request( + method='POST', url="http://" + HOST_NAME + HOST_SUBDIR, + body=data, headers=headers) + except Exception as e: + raise NetworkError(self.network, e) + + else: + if _can_use_ssl_securely(): + conn = HTTPSConnection( + context=SSL_CONTEXT, + host=HOST_NAME + ) + else: + conn = HTTPConnection( + host=HOST_NAME + ) + + try: + conn.request( + method='POST', url=HOST_SUBDIR, body=data, headers=headers) + except Exception as e: + raise NetworkError(self.network, e) + + try: + response_text = 
_unicode(conn.getresponse().read()) + except Exception as e: + raise MalformedResponseError(self.network, e) + + response_text = XML_ILLEGAL.sub("?", response_text) + + self._check_response_for_errors(response_text) + return response_text + + def execute(self, cacheable=False): + """Returns the XML DOM response of the POST Request from the server""" + + if self.network.is_caching_enabled() and cacheable: + response = self._get_cached_response() + else: + response = self._download_response() + + return minidom.parseString(_string(response).replace( + "opensearch:", "")) + + def _check_response_for_errors(self, response): + """Checks the response for errors and raises one if any exists.""" + + try: + doc = minidom.parseString(_string(response).replace( + "opensearch:", "")) + except Exception as e: + raise MalformedResponseError(self.network, e) + + e = doc.getElementsByTagName('lfm')[0] + + if e.getAttribute('status') != "ok": + e = doc.getElementsByTagName('error')[0] + status = e.getAttribute('code') + details = e.firstChild.data.strip() + raise WSError(self.network, status, details) + + +class SessionKeyGenerator(object): + """Methods of generating a session key: + 1) Web Authentication: + a. network = get_*_network(API_KEY, API_SECRET) + b. sg = SessionKeyGenerator(network) + c. url = sg.get_web_auth_url() + d. Ask the user to open the url and authorize you, and wait for it. + e. session_key = sg.get_web_auth_session_key(url) + 2) Username and Password Authentication: + a. network = get_*_network(API_KEY, API_SECRET) + b. username = raw_input("Please enter your username: ") + c. password_hash = pylast.md5(raw_input("Please enter your password: ") + d. session_key = SessionKeyGenerator(network).get_session_key(username, + password_hash) + + A session key's lifetime is infinite, unless the user revokes the rights + of the given API Key. 
+ + If you create a Network object with just a API_KEY and API_SECRET and a + username and a password_hash, a SESSION_KEY will be automatically generated + for that network and stored in it so you don't have to do this manually, + unless you want to. + """ + + def __init__(self, network): + self.network = network + self.web_auth_tokens = {} + + def _get_web_auth_token(self): + """ + Retrieves a token from the network for web authentication. + The token then has to be authorized from getAuthURL before creating + session. + """ + + request = _Request(self.network, 'auth.getToken') + + # default action is that a request is signed only when + # a session key is provided. + request.sign_it() + + doc = request.execute() + + e = doc.getElementsByTagName('token')[0] + return e.firstChild.data + + def get_web_auth_url(self): + """ + The user must open this page, and you first, then + call get_web_auth_session_key(url) after that. + """ + + token = self._get_web_auth_token() + + url = '%(homepage)s/api/auth/?api_key=%(api)s&token=%(token)s' % \ + {"homepage": self.network.homepage, + "api": self.network.api_key, "token": token} + + self.web_auth_tokens[url] = token + + return url + + def get_web_auth_session_key(self, url, token=""): + """ + Retrieves the session key of a web authorization process by its url. + """ + + if url in self.web_auth_tokens.keys(): + token = self.web_auth_tokens[url] + else: + # This will raise a WSError if token is blank or unauthorized + token = token + + request = _Request(self.network, 'auth.getSession', {'token': token}) + + # default action is that a request is signed only when + # a session key is provided. + request.sign_it() + + doc = request.execute() + + return doc.getElementsByTagName('key')[0].firstChild.data + + def get_session_key(self, username, password_hash): + """ + Retrieve a session key with a username and a md5 hash of the user's + password. 
+ """ + + params = { + "username": username, "authToken": md5(username + password_hash)} + request = _Request(self.network, "auth.getMobileSession", params) + + # default action is that a request is signed only when + # a session key is provided. + request.sign_it() + + doc = request.execute() + + return _extract(doc, "key") + + +TopItem = collections.namedtuple("TopItem", ["item", "weight"]) +SimilarItem = collections.namedtuple("SimilarItem", ["item", "match"]) +LibraryItem = collections.namedtuple( + "LibraryItem", ["item", "playcount", "tagcount"]) +PlayedTrack = collections.namedtuple( + "PlayedTrack", ["track", "album", "playback_date", "timestamp"]) +LovedTrack = collections.namedtuple( + "LovedTrack", ["track", "date", "timestamp"]) +ImageSizes = collections.namedtuple( + "ImageSizes", [ + "original", "large", "largesquare", "medium", "small", "extralarge"]) +Image = collections.namedtuple( + "Image", [ + "title", "url", "dateadded", "format", "owner", "sizes", "votes"]) +Shout = collections.namedtuple( + "Shout", ["body", "author", "date"]) + + +def _string_output(funct): + def r(*args): + return _string(funct(*args)) + + return r + + +def _pad_list(given_list, desired_length, padding=None): + """ + Pads a list to be of the desired_length. 
+ """ + + while len(given_list) < desired_length: + given_list.append(padding) + + return given_list + + +class _BaseObject(object): + """An abstract webservices object.""" + + network = None + + def __init__(self, network, ws_prefix): + self.network = network + self.ws_prefix = ws_prefix + + def _request(self, method_name, cacheable=False, params=None): + if not params: + params = self._get_params() + + return _Request(self.network, method_name, params).execute(cacheable) + + def _get_params(self): + """Returns the most common set of parameters between all objects.""" + + return {} + + def __hash__(self): + # Convert any ints (or whatever) into strings + values = map(six.text_type, self._get_params().values()) + + return hash(self.network) + hash(six.text_type(type(self)) + "".join( + list(self._get_params().keys()) + list(values) + ).lower()) + + def _extract_cdata_from_request(self, method_name, tag_name, params): + doc = self._request(method_name, True, params) + + return doc.getElementsByTagName( + tag_name)[0].firstChild.wholeText.strip() + + def _get_things( + self, method, thing, thing_type, params=None, cacheable=True): + """Returns a list of the most played thing_types by this thing.""" + + doc = self._request( + self.ws_prefix + "." + method, cacheable, params) + + seq = [] + for node in doc.getElementsByTagName(thing): + title = _extract(node, "name") + artist = _extract(node, "name", 1) + playcount = _number(_extract(node, "playcount")) + + seq.append(TopItem( + thing_type(artist, title, self.network), playcount)) + + return seq + + def get_top_fans(self, limit=None, cacheable=True): + """Returns a list of the Users who played this the most. + # Parameters: + * limit int: Max elements. 
+ # For Artist/Track + """ + + doc = self._request(self.ws_prefix + '.getTopFans', cacheable) + + seq = [] + + elements = doc.getElementsByTagName('user') + + for element in elements: + if limit and len(seq) >= limit: + break + + name = _extract(element, 'name') + weight = _number(_extract(element, 'weight')) + + seq.append(TopItem(User(name, self.network), weight)) + + return seq + + def share(self, users, message=None): + """ + Shares this (sends out recommendations). + Parameters: + * users [User|str,]: A list that can contain usernames, emails, + User objects, or all of them. + * message str: A message to include in the recommendation message. + Only for Artist/Event/Track. + """ + + # Last.fm currently accepts a max of 10 recipient at a time + while(len(users) > 10): + section = users[0:9] + users = users[9:] + self.share(section, message) + + nusers = [] + for user in users: + if isinstance(user, User): + nusers.append(user.get_name()) + else: + nusers.append(user) + + params = self._get_params() + recipients = ','.join(nusers) + params['recipient'] = recipients + if message: + params['message'] = message + + self._request(self.ws_prefix + '.share', False, params) + + def get_wiki_published_date(self): + """ + Returns the summary of the wiki. + Only for Album/Track. + """ + return self.get_wiki("published") + + def get_wiki_summary(self): + """ + Returns the summary of the wiki. + Only for Album/Track. + """ + return self.get_wiki("summary") + + def get_wiki_content(self): + """ + Returns the summary of the wiki. + Only for Album/Track. + """ + return self.get_wiki("content") + + def get_wiki(self, section): + """ + Returns a section of the wiki. + Only for Album/Track. 
+ section can be "content", "summary" or + "published" (for published date) + """ + + doc = self._request(self.ws_prefix + ".getInfo", True) + + if len(doc.getElementsByTagName("wiki")) == 0: + return + + node = doc.getElementsByTagName("wiki")[0] + + return _extract(node, section) + + def get_shouts(self, limit=50, cacheable=False): + """ + Returns a sequence of Shout objects + """ + + shouts = [] + for node in _collect_nodes( + limit, + self, + self.ws_prefix + ".getShouts", + cacheable): + shouts.append( + Shout( + _extract(node, "body"), + User(_extract(node, "author"), self.network), + _extract(node, "date") + ) + ) + return shouts + + +class _Chartable(object): + """Common functions for classes with charts.""" + + def __init__(self, ws_prefix): + self.ws_prefix = ws_prefix # TODO move to _BaseObject? + + def get_weekly_chart_dates(self): + """Returns a list of From and To tuples for the available charts.""" + + doc = self._request(self.ws_prefix + ".getWeeklyChartList", True) + + seq = [] + for node in doc.getElementsByTagName("chart"): + seq.append((node.getAttribute("from"), node.getAttribute("to"))) + + return seq + + def get_weekly_album_charts(self, from_date=None, to_date=None): + """ + Returns the weekly album charts for the week starting from the + from_date value to the to_date value. + Only for Group or User. + """ + return self.get_weekly_charts("album", from_date, to_date) + + def get_weekly_artist_charts(self, from_date=None, to_date=None): + """ + Returns the weekly artist charts for the week starting from the + from_date value to the to_date value. + Only for Group, Tag or User. + """ + return self.get_weekly_charts("artist", from_date, to_date) + + def get_weekly_track_charts(self, from_date=None, to_date=None): + """ + Returns the weekly track charts for the week starting from the + from_date value to the to_date value. + Only for Group or User. 
+ """ + return self.get_weekly_charts("track", from_date, to_date) + + def get_weekly_charts(self, chart_kind, from_date=None, to_date=None): + """ + Returns the weekly charts for the week starting from the + from_date value to the to_date value. + chart_kind should be one of "album", "artist" or "track" + """ + method = ".getWeekly" + chart_kind.title() + "Chart" + chart_type = eval(chart_kind.title()) # string to type + + params = self._get_params() + if from_date and to_date: + params["from"] = from_date + params["to"] = to_date + + doc = self._request( + self.ws_prefix + method, True, params) + + seq = [] + for node in doc.getElementsByTagName(chart_kind.lower()): + item = chart_type( + _extract(node, "artist"), _extract(node, "name"), self.network) + weight = _number(_extract(node, "playcount")) + seq.append(TopItem(item, weight)) + + return seq + + +class _Taggable(object): + """Common functions for classes with tags.""" + + def __init__(self, ws_prefix): + self.ws_prefix = ws_prefix # TODO move to _BaseObject + + def add_tags(self, tags): + """Adds one or several tags. + * tags: A sequence of tag names or Tag objects. + """ + + for tag in tags: + self.add_tag(tag) + + def add_tag(self, tag): + """Adds one tag. + * tag: a tag name or a Tag object. + """ + + if isinstance(tag, Tag): + tag = tag.get_name() + + params = self._get_params() + params['tags'] = tag + + self._request(self.ws_prefix + '.addTags', False, params) + + def remove_tag(self, tag): + """Remove a user's tag from this object.""" + + if isinstance(tag, Tag): + tag = tag.get_name() + + params = self._get_params() + params['tag'] = tag + + self._request(self.ws_prefix + '.removeTag', False, params) + + def get_tags(self): + """Returns a list of the tags set by the user to this object.""" + + # Uncacheable because it can be dynamically changed by the user. 
+ params = self._get_params() + + doc = self._request(self.ws_prefix + '.getTags', False, params) + tag_names = _extract_all(doc, 'name') + tags = [] + for tag in tag_names: + tags.append(Tag(tag, self.network)) + + return tags + + def remove_tags(self, tags): + """Removes one or several tags from this object. + * tags: a sequence of tag names or Tag objects. + """ + + for tag in tags: + self.remove_tag(tag) + + def clear_tags(self): + """Clears all the user-set tags. """ + + self.remove_tags(*(self.get_tags())) + + def set_tags(self, tags): + """Sets this object's tags to only those tags. + * tags: a sequence of tag names or Tag objects. + """ + + c_old_tags = [] + old_tags = [] + c_new_tags = [] + new_tags = [] + + to_remove = [] + to_add = [] + + tags_on_server = self.get_tags() + + for tag in tags_on_server: + c_old_tags.append(tag.get_name().lower()) + old_tags.append(tag.get_name()) + + for tag in tags: + c_new_tags.append(tag.lower()) + new_tags.append(tag) + + for i in range(0, len(old_tags)): + if not c_old_tags[i] in c_new_tags: + to_remove.append(old_tags[i]) + + for i in range(0, len(new_tags)): + if not c_new_tags[i] in c_old_tags: + to_add.append(new_tags[i]) + + self.remove_tags(to_remove) + self.add_tags(to_add) + + def get_top_tags(self, limit=None): + """Returns a list of the most frequently used Tags on this object.""" + + doc = self._request(self.ws_prefix + '.getTopTags', True) + + elements = doc.getElementsByTagName('tag') + seq = [] + + for element in elements: + tag_name = _extract(element, 'name') + tagcount = _extract(element, 'count') + + seq.append(TopItem(Tag(tag_name, self.network), tagcount)) + + if limit: + seq = seq[:limit] + + return seq + + +class WSError(Exception): + """Exception related to the Network web service""" + + def __init__(self, network, status, details): + self.status = status + self.details = details + self.network = network + + @_string_output + def __str__(self): + return self.details + + def get_id(self): + 
"""Returns the exception ID, from one of the following: + STATUS_INVALID_SERVICE = 2 + STATUS_INVALID_METHOD = 3 + STATUS_AUTH_FAILED = 4 + STATUS_INVALID_FORMAT = 5 + STATUS_INVALID_PARAMS = 6 + STATUS_INVALID_RESOURCE = 7 + STATUS_TOKEN_ERROR = 8 + STATUS_INVALID_SK = 9 + STATUS_INVALID_API_KEY = 10 + STATUS_OFFLINE = 11 + STATUS_SUBSCRIBERS_ONLY = 12 + STATUS_TOKEN_UNAUTHORIZED = 14 + STATUS_TOKEN_EXPIRED = 15 + """ + + return self.status + + +class MalformedResponseError(Exception): + """Exception conveying a malformed response from the music network.""" + + def __init__(self, network, underlying_error): + self.network = network + self.underlying_error = underlying_error + + def __str__(self): + return "Malformed response from {}. Underlying error: {}".format( + self.network.name, str(self.underlying_error)) + + +class NetworkError(Exception): + """Exception conveying a problem in sending a request to Last.fm""" + + def __init__(self, network, underlying_error): + self.network = network + self.underlying_error = underlying_error + + def __str__(self): + return "NetworkError: %s" % str(self.underlying_error) + + +class _Opus(_BaseObject, _Taggable): + """An album or track.""" + + artist = None + title = None + username = None + + __hash__ = _BaseObject.__hash__ + + def __init__(self, artist, title, network, ws_prefix, username=None): + """ + Create an opus instance. + # Parameters: + * artist: An artist name or an Artist object. + * title: The album or track title. 
+ * ws_prefix: 'album' or 'track' + """ + + _BaseObject.__init__(self, network, ws_prefix) + _Taggable.__init__(self, ws_prefix) + + if isinstance(artist, Artist): + self.artist = artist + else: + self.artist = Artist(artist, self.network) + + self.title = title + self.username = username + + def __repr__(self): + return "pylast.%s(%s, %s, %s)" % ( + self.ws_prefix.title(), repr(self.artist.name), + repr(self.title), repr(self.network)) + + @_string_output + def __str__(self): + return _unicode("%s - %s") % ( + self.get_artist().get_name(), self.get_title()) + + def __eq__(self, other): + if type(self) != type(other): + return False + a = self.get_title().lower() + b = other.get_title().lower() + c = self.get_artist().get_name().lower() + d = other.get_artist().get_name().lower() + return (a == b) and (c == d) + + def __ne__(self, other): + return not self.__eq__(other) + + def _get_params(self): + return { + 'artist': self.get_artist().get_name(), + self.ws_prefix: self.get_title()} + + def get_artist(self): + """Returns the associated Artist object.""" + + return self.artist + + def get_title(self, properly_capitalized=False): + """Returns the artist or track title.""" + if properly_capitalized: + self.title = _extract( + self._request(self.ws_prefix + ".getInfo", True), "name") + + return self.title + + def get_name(self, properly_capitalized=False): + """Returns the album or track title (alias to get_title()).""" + + return self.get_title(properly_capitalized) + + def get_id(self): + """Returns the ID on the network.""" + + return _extract( + self._request(self.ws_prefix + ".getInfo", cacheable=True), "id") + + def get_playcount(self): + """Returns the number of plays on the network""" + + return _number(_extract( + self._request( + self.ws_prefix + ".getInfo", cacheable=True), "playcount")) + + def get_userplaycount(self): + """Returns the number of plays by a given username""" + + if not self.username: + return + + params = self._get_params() + 
class Album(_Opus):
    """An album."""

    __hash__ = _Opus.__hash__

    def __init__(self, artist, title, network, username=None):
        """Create an album instance (ws_prefix fixed to "album")."""
        super(Album, self).__init__(artist, title, network, "album", username)

    def get_release_date(self):
        """Returns the release date of the album."""
        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)
        return _extract(doc, "releasedate")

    def get_cover_image(self, size=COVER_EXTRA_LARGE):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """
        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)
        return _extract_all(doc, 'image')[size]

    def get_tracks(self):
        """Returns the list of Tracks on this album."""
        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)
        return _extract_tracks(doc, "tracks")

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the URL of the album page on the network.
        # Parameters:
        * domain_name str: The network's language domain, one of the
          DOMAIN_* constants (DOMAIN_ENGLISH, DOMAIN_GERMAN, DOMAIN_SPANISH,
          DOMAIN_FRENCH, DOMAIN_ITALIAN, DOMAIN_POLISH, DOMAIN_PORTUGUESE,
          DOMAIN_SWEDISH, DOMAIN_TURKISH, DOMAIN_RUSSIAN, DOMAIN_JAPANESE,
          DOMAIN_CHINESE).
        """
        url_template = self.network._get_url(domain_name, self.ws_prefix)
        return url_template % {
            'artist': _url_safe(self.get_artist().get_name()),
            'album': _url_safe(self.get_title()),
        }
+ If properly_capitalized was asserted then the name would be downloaded + overwriting the given one.""" + + if properly_capitalized: + self.name = _extract( + self._request(self.ws_prefix + ".getInfo", True), "name") + + return self.name + + def get_correction(self): + """Returns the corrected artist name.""" + + return _extract( + self._request(self.ws_prefix + ".getCorrection"), "name") + + def get_cover_image(self, size=COVER_MEGA): + """ + Returns a uri to the cover image + size can be one of: + COVER_MEGA + COVER_EXTRA_LARGE + COVER_LARGE + COVER_MEDIUM + COVER_SMALL + """ + + return _extract_all( + self._request(self.ws_prefix + ".getInfo", True), "image")[size] + + def get_playcount(self): + """Returns the number of plays on the network.""" + + return _number(_extract( + self._request(self.ws_prefix + ".getInfo", True), "playcount")) + + def get_userplaycount(self): + """Returns the number of plays by a given username""" + + if not self.username: + return + + params = self._get_params() + params['username'] = self.username + + doc = self._request(self.ws_prefix + ".getInfo", True, params) + return _number(_extract(doc, "userplaycount")) + + def get_mbid(self): + """Returns the MusicBrainz ID of this artist.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + return _extract(doc, "mbid") + + def get_listener_count(self): + """Returns the number of listeners on the network.""" + + if hasattr(self, "listener_count"): + return self.listener_count + else: + self.listener_count = _number(_extract( + self._request(self.ws_prefix + ".getInfo", True), "listeners")) + return self.listener_count + + def is_streamable(self): + """Returns True if the artist is streamable.""" + + return bool(_number(_extract( + self._request(self.ws_prefix + ".getInfo", True), "streamable"))) + + def get_bio(self, section, language=None): + """ + Returns a section of the bio. 
+ section can be "content", "summary" or + "published" (for published date) + """ + if language: + params = self._get_params() + params["lang"] = language + else: + params = None + + return self._extract_cdata_from_request( + self.ws_prefix + ".getInfo", section, params) + + def get_bio_published_date(self): + """Returns the date on which the artist's biography was published.""" + return self.get_bio("published") + + def get_bio_summary(self, language=None): + """Returns the summary of the artist's biography.""" + return self.get_bio("summary", language) + + def get_bio_content(self, language=None): + """Returns the content of the artist's biography.""" + return self.get_bio("content", language) + + def get_upcoming_events(self): + """Returns a list of the upcoming Events for this artist.""" + + doc = self._request(self.ws_prefix + '.getEvents', True) + + return _extract_events_from_doc(doc, self.network) + + def get_similar(self, limit=None): + """Returns the similar artists on the network.""" + + params = self._get_params() + if limit: + params['limit'] = limit + + doc = self._request(self.ws_prefix + '.getSimilar', True, params) + + names = _extract_all(doc, "name") + matches = _extract_all(doc, "match") + + artists = [] + for i in range(0, len(names)): + artists.append(SimilarItem( + Artist(names[i], self.network), _number(matches[i]))) + + return artists + + def get_top_albums(self, limit=None, cacheable=True): + """Returns a list of the top albums.""" + params = self._get_params() + if limit: + params['limit'] = limit + + return self._get_things( + "getTopAlbums", "album", Album, params, cacheable) + + def get_top_tracks(self, limit=None, cacheable=True): + """Returns a list of the most played Tracks by this artist.""" + params = self._get_params() + if limit: + params['limit'] = limit + + return self._get_things( + "getTopTracks", "track", Track, params, cacheable) + + def get_url(self, domain_name=DOMAIN_ENGLISH): + """Returns the url of the artist page on 
the network. + # Parameters: + * domain_name: The network's language domain. Possible values: + o DOMAIN_ENGLISH + o DOMAIN_GERMAN + o DOMAIN_SPANISH + o DOMAIN_FRENCH + o DOMAIN_ITALIAN + o DOMAIN_POLISH + o DOMAIN_PORTUGUESE + o DOMAIN_SWEDISH + o DOMAIN_TURKISH + o DOMAIN_RUSSIAN + o DOMAIN_JAPANESE + o DOMAIN_CHINESE + """ + + artist = _url_safe(self.get_name()) + + return self.network._get_url( + domain_name, "artist") % {'artist': artist} + + def shout(self, message): + """ + Post a shout + """ + + params = self._get_params() + params["message"] = message + + self._request("artist.Shout", False, params) + + def get_band_members(self): + """Returns a list of band members or None if unknown.""" + + names = None + doc = self._request(self.ws_prefix + ".getInfo", True) + + for node in doc.getElementsByTagName("bandmembers"): + names = _extract_all(node, "name") + + return names + + +class Event(_BaseObject): + """An event.""" + + id = None + + __hash__ = _BaseObject.__hash__ + + def __init__(self, event_id, network): + _BaseObject.__init__(self, network, 'event') + + self.id = event_id + + def __repr__(self): + return "pylast.Event(%s, %s)" % (repr(self.id), repr(self.network)) + + @_string_output + def __str__(self): + return "Event #" + str(self.get_id()) + + def __eq__(self, other): + if type(self) is type(other): + return self.get_id() == other.get_id() + else: + return False + + def __ne__(self, other): + return not self.__eq__(other) + + def _get_params(self): + return {'event': self.get_id()} + + def attend(self, attending_status): + """Sets the attending status. + * attending_status: The attending status. 
Possible values: + o EVENT_ATTENDING + o EVENT_MAYBE_ATTENDING + o EVENT_NOT_ATTENDING + """ + + params = self._get_params() + params['status'] = attending_status + + self._request('event.attend', False, params) + + def get_attendees(self): + """ + Get a list of attendees for an event + """ + + doc = self._request("event.getAttendees", False) + + users = [] + for name in _extract_all(doc, "name"): + users.append(User(name, self.network)) + + return users + + def get_id(self): + """Returns the id of the event on the network. """ + + return self.id + + def get_title(self): + """Returns the title of the event. """ + + doc = self._request("event.getInfo", True) + + return _extract(doc, "title") + + def get_headliner(self): + """Returns the headliner of the event. """ + + doc = self._request("event.getInfo", True) + + return Artist(_extract(doc, "headliner"), self.network) + + def get_artists(self): + """Returns a list of the participating Artists. """ + + doc = self._request("event.getInfo", True) + names = _extract_all(doc, "artist") + + artists = [] + for name in names: + artists.append(Artist(name, self.network)) + + return artists + + def get_venue(self): + """Returns the venue where the event is held.""" + + doc = self._request("event.getInfo", True) + + v = doc.getElementsByTagName("venue")[0] + venue_id = _number(_extract(v, "id")) + + return Venue(venue_id, self.network, venue_element=v) + + def get_start_date(self): + """Returns the date when the event starts.""" + + doc = self._request("event.getInfo", True) + + return _extract(doc, "startDate") + + def get_description(self): + """Returns the description of the event. 
""" + + doc = self._request("event.getInfo", True) + + return _extract(doc, "description") + + def get_cover_image(self, size=COVER_MEGA): + """ + Returns a uri to the cover image + size can be one of: + COVER_MEGA + COVER_EXTRA_LARGE + COVER_LARGE + COVER_MEDIUM + COVER_SMALL + """ + + doc = self._request("event.getInfo", True) + + return _extract_all(doc, "image")[size] + + def get_attendance_count(self): + """Returns the number of attending people. """ + + doc = self._request("event.getInfo", True) + + return _number(_extract(doc, "attendance")) + + def get_review_count(self): + """Returns the number of available reviews for this event. """ + + doc = self._request("event.getInfo", True) + + return _number(_extract(doc, "reviews")) + + def get_url(self, domain_name=DOMAIN_ENGLISH): + """Returns the url of the event page on the network. + * domain_name: The network's language domain. Possible values: + o DOMAIN_ENGLISH + o DOMAIN_GERMAN + o DOMAIN_SPANISH + o DOMAIN_FRENCH + o DOMAIN_ITALIAN + o DOMAIN_POLISH + o DOMAIN_PORTUGUESE + o DOMAIN_SWEDISH + o DOMAIN_TURKISH + o DOMAIN_RUSSIAN + o DOMAIN_JAPANESE + o DOMAIN_CHINESE + """ + + return self.network._get_url( + domain_name, "event") % {'id': self.get_id()} + + def shout(self, message): + """ + Post a shout + """ + + params = self._get_params() + params["message"] = message + + self._request("event.Shout", False, params) + + +class Country(_BaseObject): + """A country at Last.fm.""" + + name = None + + __hash__ = _BaseObject.__hash__ + + def __init__(self, name, network): + _BaseObject.__init__(self, network, "geo") + + self.name = name + + def __repr__(self): + return "pylast.Country(%s, %s)" % (repr(self.name), repr(self.network)) + + @_string_output + def __str__(self): + return self.get_name() + + def __eq__(self, other): + return self.get_name().lower() == other.get_name().lower() + + def __ne__(self, other): + return self.get_name() != other.get_name() + + def _get_params(self): # TODO can move to 
_BaseObject + return {'country': self.get_name()} + + def _get_name_from_code(self, alpha2code): + # TODO: Have this function lookup the alpha-2 code and return the + # country name. + + return alpha2code + + def get_name(self): + """Returns the country name. """ + + return self.name + + def get_top_artists(self, limit=None, cacheable=True): + """Returns a sequence of the most played artists.""" + params = self._get_params() + if limit: + params['limit'] = limit + + doc = self._request('geo.getTopArtists', cacheable, params) + + return _extract_top_artists(doc, self) + + def get_top_tracks(self, limit=None, cacheable=True): + """Returns a sequence of the most played tracks""" + params = self._get_params() + if limit: + params['limit'] = limit + + return self._get_things( + "getTopTracks", "track", Track, params, cacheable) + + def get_url(self, domain_name=DOMAIN_ENGLISH): + """Returns the url of the event page on the network. + * domain_name: The network's language domain. Possible values: + o DOMAIN_ENGLISH + o DOMAIN_GERMAN + o DOMAIN_SPANISH + o DOMAIN_FRENCH + o DOMAIN_ITALIAN + o DOMAIN_POLISH + o DOMAIN_PORTUGUESE + o DOMAIN_SWEDISH + o DOMAIN_TURKISH + o DOMAIN_RUSSIAN + o DOMAIN_JAPANESE + o DOMAIN_CHINESE + """ + + country_name = _url_safe(self.get_name()) + + return self.network._get_url( + domain_name, "country") % {'country_name': country_name} + + +class Metro(_BaseObject): + """A metro at Last.fm.""" + + name = None + country = None + + __hash__ = _BaseObject.__hash__ + + def __init__(self, name, country, network): + _BaseObject.__init__(self, network, None) + + self.name = name + self.country = country + + def __repr__(self): + return "pylast.Metro(%s, %s, %s)" % ( + repr(self.name), repr(self.country), repr(self.network)) + + @_string_output + def __str__(self): + return self.get_name() + ", " + self.get_country() + + def __eq__(self, other): + return (self.get_name().lower() == other.get_name().lower() and + self.get_country().lower() == 
other.get_country().lower()) + + def __ne__(self, other): + return (self.get_name() != other.get_name() or + self.get_country().lower() != other.get_country().lower()) + + def _get_params(self): + return {'metro': self.get_name(), 'country': self.get_country()} + + def get_name(self): + """Returns the metro name.""" + + return self.name + + def get_country(self): + """Returns the metro country.""" + + return self.country + + def _get_chart( + self, method, tag="artist", limit=None, from_date=None, + to_date=None, cacheable=True): + """Internal helper for getting geo charts.""" + params = self._get_params() + if limit: + params["limit"] = limit + if from_date and to_date: + params["from"] = from_date + params["to"] = to_date + + doc = self._request(method, cacheable, params) + + seq = [] + for node in doc.getElementsByTagName(tag): + if tag == "artist": + item = Artist(_extract(node, "name"), self.network) + elif tag == "track": + title = _extract(node, "name") + artist = _extract_element_tree(node).get('artist')['name'] + item = Track(artist, title, self.network) + else: + return None + weight = _number(_extract(node, "listeners")) + seq.append(TopItem(item, weight)) + + return seq + + def get_artist_chart( + self, tag="artist", limit=None, from_date=None, to_date=None, + cacheable=True): + """Get a chart of artists for a metro. + Parameters: + from_date (Optional) : Beginning timestamp of the weekly range + requested + to_date (Optional) : Ending timestamp of the weekly range requested + limit (Optional) : The number of results to fetch per page. + Defaults to 50. + """ + return self._get_chart( + "geo.getMetroArtistChart", tag=tag, limit=limit, + from_date=from_date, to_date=to_date, cacheable=cacheable) + + def get_hype_artist_chart( + self, tag="artist", limit=None, from_date=None, to_date=None, + cacheable=True): + """Get a chart of hyped (up and coming) artists for a metro. 
+ Parameters: + from_date (Optional) : Beginning timestamp of the weekly range + requested + to_date (Optional) : Ending timestamp of the weekly range requested + limit (Optional) : The number of results to fetch per page. + Defaults to 50. + """ + return self._get_chart( + "geo.getMetroHypeArtistChart", tag=tag, limit=limit, + from_date=from_date, to_date=to_date, cacheable=cacheable) + + def get_unique_artist_chart( + self, tag="artist", limit=None, from_date=None, to_date=None, + cacheable=True): + """Get a chart of the artists which make that metro unique. + Parameters: + from_date (Optional) : Beginning timestamp of the weekly range + requested + to_date (Optional) : Ending timestamp of the weekly range requested + limit (Optional) : The number of results to fetch per page. + Defaults to 50. + """ + return self._get_chart( + "geo.getMetroUniqueArtistChart", tag=tag, limit=limit, + from_date=from_date, to_date=to_date, cacheable=cacheable) + + def get_track_chart( + self, tag="track", limit=None, from_date=None, to_date=None, + cacheable=True): + """Get a chart of tracks for a metro. + Parameters: + from_date (Optional) : Beginning timestamp of the weekly range + requested + to_date (Optional) : Ending timestamp of the weekly range requested + limit (Optional) : The number of results to fetch per page. + Defaults to 50. + """ + return self._get_chart( + "geo.getMetroTrackChart", tag=tag, limit=limit, + from_date=from_date, to_date=to_date, cacheable=cacheable) + + def get_hype_track_chart( + self, tag="track", limit=None, from_date=None, to_date=None, + cacheable=True): + """Get a chart of tracks for a metro. + Parameters: + from_date (Optional) : Beginning timestamp of the weekly range + requested + to_date (Optional) : Ending timestamp of the weekly range requested + limit (Optional) : The number of results to fetch per page. + Defaults to 50. 
+ """ + return self._get_chart( + "geo.getMetroHypeTrackChart", tag=tag, + limit=limit, from_date=from_date, to_date=to_date, + cacheable=cacheable) + + def get_unique_track_chart( + self, tag="track", limit=None, from_date=None, to_date=None, + cacheable=True): + """Get a chart of tracks for a metro. + Parameters: + from_date (Optional) : Beginning timestamp of the weekly range + requested + to_date (Optional) : Ending timestamp of the weekly range requested + limit (Optional) : The number of results to fetch per page. + Defaults to 50. + """ + return self._get_chart( + "geo.getMetroUniqueTrackChart", tag=tag, limit=limit, + from_date=from_date, to_date=to_date, cacheable=cacheable) + + +class Library(_BaseObject): + """A user's Last.fm library.""" + + user = None + + __hash__ = _BaseObject.__hash__ + + def __init__(self, user, network): + _BaseObject.__init__(self, network, 'library') + + if isinstance(user, User): + self.user = user + else: + self.user = User(user, self.network) + + self._albums_index = 0 + self._artists_index = 0 + self._tracks_index = 0 + + def __repr__(self): + return "pylast.Library(%s, %s)" % (repr(self.user), repr(self.network)) + + @_string_output + def __str__(self): + return repr(self.get_user()) + "'s Library" + + def _get_params(self): + return {'user': self.user.get_name()} + + def get_user(self): + """Returns the user who owns this library.""" + + return self.user + + def add_album(self, album): + """Add an album to this library.""" + + params = self._get_params() + params["artist"] = album.get_artist().get_name() + params["album"] = album.get_name() + + self._request("library.addAlbum", False, params) + + def remove_album(self, album): + """Remove an album from this library.""" + + params = self._get_params() + params["artist"] = album.get_artist().get_name() + params["album"] = album.get_name() + + self._request(self.ws_prefix + ".removeAlbum", False, params) + + def add_artist(self, artist): + """Add an artist to this library.""" 
+ + params = self._get_params() + if type(artist) == str: + params["artist"] = artist + else: + params["artist"] = artist.get_name() + + self._request(self.ws_prefix + ".addArtist", False, params) + + def remove_artist(self, artist): + """Remove an artist from this library.""" + + params = self._get_params() + if type(artist) == str: + params["artist"] = artist + else: + params["artist"] = artist.get_name() + + self._request(self.ws_prefix + ".removeArtist", False, params) + + def add_track(self, track): + """Add a track to this library.""" + + params = self._get_params() + params["track"] = track.get_title() + + self._request(self.ws_prefix + ".addTrack", False, params) + + def get_albums(self, artist=None, limit=50, cacheable=True): + """ + Returns a sequence of Album objects + If no artist is specified, it will return all, sorted by decreasing + play count. + If limit==None it will return all (may take a while) + """ + + params = self._get_params() + if artist: + params["artist"] = artist + + seq = [] + for node in _collect_nodes( + limit, + self, + self.ws_prefix + ".getAlbums", + cacheable, + params): + name = _extract(node, "name") + artist = _extract(node, "name", 1) + playcount = _number(_extract(node, "playcount")) + tagcount = _number(_extract(node, "tagcount")) + + seq.append(LibraryItem( + Album(artist, name, self.network), playcount, tagcount)) + + return seq + + def get_artists(self, limit=50, cacheable=True): + """ + Returns a sequence of Album objects + if limit==None it will return all (may take a while) + """ + + seq = [] + for node in _collect_nodes( + limit, + self, + self.ws_prefix + ".getArtists", + cacheable): + name = _extract(node, "name") + + playcount = _number(_extract(node, "playcount")) + tagcount = _number(_extract(node, "tagcount")) + + seq.append(LibraryItem( + Artist(name, self.network), playcount, tagcount)) + + return seq + + def get_tracks(self, artist=None, album=None, limit=50, cacheable=True): + """ + Returns a sequence of 
Album objects + If limit==None it will return all (may take a while) + """ + + params = self._get_params() + if artist: + params["artist"] = artist + if album: + params["album"] = album + + seq = [] + for node in _collect_nodes( + limit, + self, + self.ws_prefix + ".getTracks", + cacheable, + params): + name = _extract(node, "name") + artist = _extract(node, "name", 1) + playcount = _number(_extract(node, "playcount")) + tagcount = _number(_extract(node, "tagcount")) + + seq.append(LibraryItem( + Track(artist, name, self.network), playcount, tagcount)) + + return seq + + def remove_scrobble(self, artist, title, timestamp): + """Remove a scrobble from a user's Last.fm library. Parameters: + artist (Required) : The artist that composed the track + title (Required) : The name of the track + timestamp (Required) : The unix timestamp of the scrobble + that you wish to remove + """ + + params = self._get_params() + params["artist"] = artist + params["track"] = title + params["timestamp"] = timestamp + + self._request(self.ws_prefix + ".removeScrobble", False, params) + + +class Playlist(_BaseObject): + """A Last.fm user playlist.""" + + id = None + user = None + + __hash__ = _BaseObject.__hash__ + + def __init__(self, user, playlist_id, network): + _BaseObject.__init__(self, network, "playlist") + + if isinstance(user, User): + self.user = user + else: + self.user = User(user, self.network) + + self.id = playlist_id + + @_string_output + def __str__(self): + return repr(self.user) + "'s playlist # " + repr(self.id) + + def _get_info_node(self): + """ + Returns the node from user.getPlaylists where this playlist's info is. 
+ """ + + doc = self._request("user.getPlaylists", True) + + for node in doc.getElementsByTagName("playlist"): + if _extract(node, "id") == str(self.get_id()): + return node + + def _get_params(self): + return {'user': self.user.get_name(), 'playlistID': self.get_id()} + + def get_id(self): + """Returns the playlist ID.""" + + return self.id + + def get_user(self): + """Returns the owner user of this playlist.""" + + return self.user + + def get_tracks(self): + """Returns a list of the tracks on this user playlist.""" + + uri = _unicode('lastfm://playlist/%s') % self.get_id() + + return XSPF(uri, self.network).get_tracks() + + def add_track(self, track): + """Adds a Track to this Playlist.""" + + params = self._get_params() + params['artist'] = track.get_artist().get_name() + params['track'] = track.get_title() + + self._request('playlist.addTrack', False, params) + + def get_title(self): + """Returns the title of this playlist.""" + + return _extract(self._get_info_node(), "title") + + def get_creation_date(self): + """Returns the creation date of this playlist.""" + + return _extract(self._get_info_node(), "date") + + def get_size(self): + """Returns the number of tracks in this playlist.""" + + return _number(_extract(self._get_info_node(), "size")) + + def get_description(self): + """Returns the description of this playlist.""" + + return _extract(self._get_info_node(), "description") + + def get_duration(self): + """Returns the duration of this playlist in milliseconds.""" + + return _number(_extract(self._get_info_node(), "duration")) + + def is_streamable(self): + """ + Returns True if the playlist is streamable. + For a playlist to be streamable, it needs at least 45 tracks by 15 + different artists.""" + + if _extract(self._get_info_node(), "streamable") == '1': + return True + else: + return False + + def has_track(self, track): + """Checks to see if track is already in the playlist. + * track: Any Track object. 
+ """ + + return track in self.get_tracks() + + def get_cover_image(self, size=COVER_EXTRA_LARGE): + """ + Returns a uri to the cover image + size can be one of: + COVER_MEGA + COVER_EXTRA_LARGE + COVER_LARGE + COVER_MEDIUM + COVER_SMALL + """ + + return _extract(self._get_info_node(), "image")[size] + + def get_url(self, domain_name=DOMAIN_ENGLISH): + """Returns the url of the playlist on the network. + * domain_name: The network's language domain. Possible values: + o DOMAIN_ENGLISH + o DOMAIN_GERMAN + o DOMAIN_SPANISH + o DOMAIN_FRENCH + o DOMAIN_ITALIAN + o DOMAIN_POLISH + o DOMAIN_PORTUGUESE + o DOMAIN_SWEDISH + o DOMAIN_TURKISH + o DOMAIN_RUSSIAN + o DOMAIN_JAPANESE + o DOMAIN_CHINESE + """ + + english_url = _extract(self._get_info_node(), "url") + appendix = english_url[english_url.rfind("/") + 1:] + + return self.network._get_url(domain_name, "playlist") % { + 'appendix': appendix, "user": self.get_user().get_name()} + + +class Tag(_BaseObject, _Chartable): + """A Last.fm object tag.""" + + name = None + + __hash__ = _BaseObject.__hash__ + + def __init__(self, name, network): + _BaseObject.__init__(self, network, 'tag') + _Chartable.__init__(self, 'tag') + + self.name = name + + def __repr__(self): + return "pylast.Tag(%s, %s)" % (repr(self.name), repr(self.network)) + + @_string_output + def __str__(self): + return self.get_name() + + def __eq__(self, other): + return self.get_name().lower() == other.get_name().lower() + + def __ne__(self, other): + return self.get_name().lower() != other.get_name().lower() + + def _get_params(self): + return {self.ws_prefix: self.get_name()} + + def get_name(self, properly_capitalized=False): + """Returns the name of the tag. """ + + if properly_capitalized: + self.name = _extract( + self._request(self.ws_prefix + ".getInfo", True), "name") + + return self.name + + def get_similar(self): + """Returns the tags similar to this one, ordered by similarity. 
""" + + doc = self._request(self.ws_prefix + '.getSimilar', True) + + seq = [] + names = _extract_all(doc, 'name') + for name in names: + seq.append(Tag(name, self.network)) + + return seq + + def get_top_albums(self, limit=None, cacheable=True): + """Retuns a list of the top albums.""" + params = self._get_params() + if limit: + params['limit'] = limit + + doc = self._request( + self.ws_prefix + '.getTopAlbums', cacheable, params) + + return _extract_top_albums(doc, self.network) + + def get_top_tracks(self, limit=None, cacheable=True): + """Returns a list of the most played Tracks for this tag.""" + params = self._get_params() + if limit: + params['limit'] = limit + + return self._get_things( + "getTopTracks", "track", Track, params, cacheable) + + def get_top_artists(self, limit=None, cacheable=True): + """Returns a sequence of the most played artists.""" + + params = self._get_params() + if limit: + params['limit'] = limit + + doc = self._request( + self.ws_prefix + '.getTopArtists', cacheable, params) + + return _extract_top_artists(doc, self.network) + + def get_url(self, domain_name=DOMAIN_ENGLISH): + """Returns the url of the tag page on the network. + * domain_name: The network's language domain. 
Possible values: + o DOMAIN_ENGLISH + o DOMAIN_GERMAN + o DOMAIN_SPANISH + o DOMAIN_FRENCH + o DOMAIN_ITALIAN + o DOMAIN_POLISH + o DOMAIN_PORTUGUESE + o DOMAIN_SWEDISH + o DOMAIN_TURKISH + o DOMAIN_RUSSIAN + o DOMAIN_JAPANESE + o DOMAIN_CHINESE + """ + + name = _url_safe(self.get_name()) + + return self.network._get_url(domain_name, "tag") % {'name': name} + + +class Track(_Opus): + """A Last.fm track.""" + + __hash__ = _Opus.__hash__ + + def __init__(self, artist, title, network, username=None): + super(Track, self).__init__(artist, title, network, "track", username) + + def get_correction(self): + """Returns the corrected track name.""" + + return _extract( + self._request(self.ws_prefix + ".getCorrection"), "name") + + def get_duration(self): + """Returns the track duration.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + return _number(_extract(doc, "duration")) + + def get_userloved(self): + """Whether the user loved this track""" + + if not self.username: + return + + params = self._get_params() + params['username'] = self.username + + doc = self._request(self.ws_prefix + ".getInfo", True, params) + loved = _number(_extract(doc, "userloved")) + return bool(loved) + + def is_streamable(self): + """Returns True if the track is available at Last.fm.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + return _extract(doc, "streamable") == "1" + + def is_fulltrack_available(self): + """Returns True if the fulltrack is available for streaming.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + return doc.getElementsByTagName( + "streamable")[0].getAttribute("fulltrack") == "1" + + def get_album(self): + """Returns the album object of this track.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + albums = doc.getElementsByTagName("album") + + if len(albums) == 0: + return + + node = doc.getElementsByTagName("album")[0] + return Album( + _extract(node, "artist"), _extract(node, "title"), self.network) + + def 
love(self): + """Adds the track to the user's loved tracks. """ + + self._request(self.ws_prefix + '.love') + + def unlove(self): + """Remove the track to the user's loved tracks. """ + + self._request(self.ws_prefix + '.unlove') + + def ban(self): + """Ban this track from ever playing on the radio. """ + + self._request(self.ws_prefix + '.ban') + + def get_similar(self): + """ + Returns similar tracks for this track on the network, + based on listening data. + """ + + doc = self._request(self.ws_prefix + '.getSimilar', True) + + seq = [] + for node in doc.getElementsByTagName(self.ws_prefix): + title = _extract(node, 'name') + artist = _extract(node, 'name', 1) + match = _number(_extract(node, "match")) + + seq.append(SimilarItem(Track(artist, title, self.network), match)) + + return seq + + def get_url(self, domain_name=DOMAIN_ENGLISH): + """Returns the URL of the album or track page on the network. + # Parameters: + * domain_name str: The network's language domain. Possible values: + o DOMAIN_ENGLISH + o DOMAIN_GERMAN + o DOMAIN_SPANISH + o DOMAIN_FRENCH + o DOMAIN_ITALIAN + o DOMAIN_POLISH + o DOMAIN_PORTUGUESE + o DOMAIN_SWEDISH + o DOMAIN_TURKISH + o DOMAIN_RUSSIAN + o DOMAIN_JAPANESE + o DOMAIN_CHINESE + """ + + artist = _url_safe(self.get_artist().get_name()) + title = _url_safe(self.get_title()) + + return self.network._get_url( + domain_name, self.ws_prefix) % { + 'artist': artist, 'title': title} + + +class Group(_BaseObject, _Chartable): + """A Last.fm group.""" + + name = None + + __hash__ = _BaseObject.__hash__ + + def __init__(self, name, network): + _BaseObject.__init__(self, network, 'group') + _Chartable.__init__(self, 'group') + + self.name = name + + def __repr__(self): + return "pylast.Group(%s, %s)" % (repr(self.name), repr(self.network)) + + @_string_output + def __str__(self): + return self.get_name() + + def __eq__(self, other): + return self.get_name().lower() == other.get_name().lower() + + def __ne__(self, other): + return 
self.get_name() != other.get_name() + + def _get_params(self): + return {self.ws_prefix: self.get_name()} + + def get_name(self): + """Returns the group name. """ + return self.name + + def get_url(self, domain_name=DOMAIN_ENGLISH): + """Returns the url of the group page on the network. + * domain_name: The network's language domain. Possible values: + o DOMAIN_ENGLISH + o DOMAIN_GERMAN + o DOMAIN_SPANISH + o DOMAIN_FRENCH + o DOMAIN_ITALIAN + o DOMAIN_POLISH + o DOMAIN_PORTUGUESE + o DOMAIN_SWEDISH + o DOMAIN_TURKISH + o DOMAIN_RUSSIAN + o DOMAIN_JAPANESE + o DOMAIN_CHINESE + """ + + name = _url_safe(self.get_name()) + + return self.network._get_url(domain_name, "group") % {'name': name} + + def get_members(self, limit=50, cacheable=False): + """ + Returns a sequence of User objects + if limit==None it will return all + """ + + nodes = _collect_nodes( + limit, self, self.ws_prefix + ".getMembers", cacheable) + + users = [] + + for node in nodes: + users.append(User(_extract(node, "name"), self.network)) + + return users + + +class XSPF(_BaseObject): + "A Last.fm XSPF playlist.""" + + uri = None + + __hash__ = _BaseObject.__hash__ + + def __init__(self, uri, network): + _BaseObject.__init__(self, network, None) + + self.uri = uri + + def _get_params(self): + return {'playlistURL': self.get_uri()} + + @_string_output + def __str__(self): + return self.get_uri() + + def __eq__(self, other): + return self.get_uri() == other.get_uri() + + def __ne__(self, other): + return self.get_uri() != other.get_uri() + + def get_uri(self): + """Returns the Last.fm playlist URI. 
""" + + return self.uri + + def get_tracks(self): + """Returns the tracks on this playlist.""" + + doc = self._request('playlist.fetch', True) + + seq = [] + for node in doc.getElementsByTagName('track'): + title = _extract(node, 'title') + artist = _extract(node, 'creator') + + seq.append(Track(artist, title, self.network)) + + return seq + + +class User(_BaseObject, _Chartable): + """A Last.fm user.""" + + name = None + + __hash__ = _BaseObject.__hash__ + + def __init__(self, user_name, network): + _BaseObject.__init__(self, network, 'user') + _Chartable.__init__(self, 'user') + + self.name = user_name + + self._past_events_index = 0 + self._recommended_events_index = 0 + self._recommended_artists_index = 0 + + def __repr__(self): + return "pylast.User(%s, %s)" % (repr(self.name), repr(self.network)) + + @_string_output + def __str__(self): + return self.get_name() + + def __eq__(self, another): + if isinstance(another, User): + return self.get_name() == another.get_name() + else: + return False + + def __ne__(self, another): + if isinstance(another, User): + return self.get_name() != another.get_name() + else: + return True + + def _get_params(self): + return {self.ws_prefix: self.get_name()} + + def get_name(self, properly_capitalized=False): + """Returns the user name.""" + + if properly_capitalized: + self.name = _extract( + self._request(self.ws_prefix + ".getInfo", True), "name") + + return self.name + + def get_upcoming_events(self): + """Returns all the upcoming events for this user.""" + + doc = self._request(self.ws_prefix + '.getEvents', True) + + return _extract_events_from_doc(doc, self.network) + + def get_artist_tracks(self, artist, cacheable=False): + """ + Get a list of tracks by a given artist scrobbled by this user, + including scrobble time. + """ + # Not implemented: + # "Can be limited to specific timeranges, defaults to all time." 
+ + params = self._get_params() + params['artist'] = artist + + seq = [] + for track in _collect_nodes( + None, + self, + self.ws_prefix + ".getArtistTracks", + cacheable, + params): + title = _extract(track, "name") + artist = _extract(track, "artist") + date = _extract(track, "date") + album = _extract(track, "album") + timestamp = track.getElementsByTagName( + "date")[0].getAttribute("uts") + + seq.append(PlayedTrack( + Track(artist, title, self.network), album, date, timestamp)) + + return seq + + def get_friends(self, limit=50, cacheable=False): + """Returns a list of the user's friends. """ + + seq = [] + for node in _collect_nodes( + limit, + self, + self.ws_prefix + ".getFriends", + cacheable): + seq.append(User(_extract(node, "name"), self.network)) + + return seq + + def get_loved_tracks(self, limit=50, cacheable=True): + """ + Returns this user's loved track as a sequence of LovedTrack objects in + reverse order of their timestamp, all the way back to the first track. + + If limit==None, it will try to pull all the available data. + + This method uses caching. Enable caching only if you're pulling a + large amount of data. + + Use extract_items() with the return of this function to + get only a sequence of Track objects with no playback dates. 
+ """ + + params = self._get_params() + if limit: + params['limit'] = limit + + seq = [] + for track in _collect_nodes( + limit, + self, + self.ws_prefix + ".getLovedTracks", + cacheable, + params): + title = _extract(track, "name") + artist = _extract(track, "name", 1) + date = _extract(track, "date") + timestamp = track.getElementsByTagName( + "date")[0].getAttribute("uts") + + seq.append(LovedTrack( + Track(artist, title, self.network), date, timestamp)) + + return seq + + def get_neighbours(self, limit=50, cacheable=True): + """Returns a list of the user's friends.""" + + params = self._get_params() + if limit: + params['limit'] = limit + + doc = self._request( + self.ws_prefix + '.getNeighbours', cacheable, params) + + seq = [] + names = _extract_all(doc, 'name') + + for name in names: + seq.append(User(name, self.network)) + + return seq + + def get_past_events(self, limit=50, cacheable=False): + """ + Returns a sequence of Event objects + if limit==None it will return all + """ + + seq = [] + for node in _collect_nodes( + limit, + self, + self.ws_prefix + ".getPastEvents", + cacheable): + seq.append(Event(_extract(node, "id"), self.network)) + + return seq + + def get_playlists(self): + """Returns a list of Playlists that this user owns.""" + + doc = self._request(self.ws_prefix + ".getPlaylists", True) + + playlists = [] + for playlist_id in _extract_all(doc, "id"): + playlists.append( + Playlist(self.get_name(), playlist_id, self.network)) + + return playlists + + def get_now_playing(self): + """ + Returns the currently playing track, or None if nothing is playing. 
+ """ + + params = self._get_params() + params['limit'] = '1' + + doc = self._request(self.ws_prefix + '.getRecentTracks', False, params) + + tracks = doc.getElementsByTagName('track') + + if len(tracks) == 0: + return None + + e = tracks[0] + + if not e.hasAttribute('nowplaying'): + return None + + artist = _extract(e, 'artist') + title = _extract(e, 'name') + + return Track(artist, title, self.network, self.name) + + def get_recent_tracks(self, limit=10, cacheable=True, + time_from=None, time_to=None): + """ + Returns this user's played track as a sequence of PlayedTrack objects + in reverse order of playtime, all the way back to the first track. + + Parameters: + limit : If None, it will try to pull all the available data. + from (Optional) : Beginning timestamp of a range - only display + scrobbles after this time, in UNIX timestamp format (integer + number of seconds since 00:00:00, January 1st 1970 UTC). This + must be in the UTC time zone. + to (Optional) : End timestamp of a range - only display scrobbles + before this time, in UNIX timestamp format (integer number of + seconds since 00:00:00, January 1st 1970 UTC). This must be in + the UTC time zone. + + This method uses caching. Enable caching only if you're pulling a + large amount of data. + + Use extract_items() with the return of this function to + get only a sequence of Track objects with no playback dates. 
+ """ + + params = self._get_params() + if limit: + params['limit'] = limit + if time_from: + params['from'] = time_from + if time_to: + params['to'] = time_to + + seq = [] + for track in _collect_nodes( + limit, + self, + self.ws_prefix + ".getRecentTracks", + cacheable, + params): + + if track.hasAttribute('nowplaying'): + continue # to prevent the now playing track from sneaking in + + title = _extract(track, "name") + artist = _extract(track, "artist") + date = _extract(track, "date") + album = _extract(track, "album") + timestamp = track.getElementsByTagName( + "date")[0].getAttribute("uts") + + seq.append(PlayedTrack( + Track(artist, title, self.network), album, date, timestamp)) + + return seq + + def get_id(self): + """Returns the user ID.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + return _extract(doc, "id") + + def get_language(self): + """Returns the language code of the language used by the user.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + return _extract(doc, "lang") + + def get_country(self): + """Returns the name of the country of the user.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + country = _extract(doc, "country") + + if country is None: + return None + else: + return Country(country, self.network) + + def get_age(self): + """Returns the user's age.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + return _number(_extract(doc, "age")) + + def get_gender(self): + """Returns the user's gender. Either USER_MALE or USER_FEMALE.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + value = _extract(doc, "gender") + + if value == 'm': + return USER_MALE + elif value == 'f': + return USER_FEMALE + + return None + + def is_subscriber(self): + """Returns whether the user is a subscriber or not. 
True or False.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + return _extract(doc, "subscriber") == "1" + + def get_playcount(self): + """Returns the user's playcount so far.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + return _number(_extract(doc, "playcount")) + + def get_registered(self): + """Returns the user's registration date.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + return _extract(doc, "registered") + + def get_unixtime_registered(self): + """Returns the user's registration date as a UNIX timestamp.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + return doc.getElementsByTagName( + "registered")[0].getAttribute("unixtime") + + def get_tagged_albums(self, tag, limit=None, cacheable=True): + """Returns the albums tagged by a user.""" + + params = self._get_params() + params['tag'] = tag + params['taggingtype'] = 'album' + if limit: + params['limit'] = limit + doc = self._request(self.ws_prefix + '.getpersonaltags', cacheable, + params) + return _extract_albums(doc, self.network) + + def get_tagged_artists(self, tag, limit=None): + """Returns the artists tagged by a user.""" + + params = self._get_params() + params['tag'] = tag + params['taggingtype'] = 'artist' + if limit: + params["limit"] = limit + doc = self._request(self.ws_prefix + '.getpersonaltags', True, params) + return _extract_artists(doc, self.network) + + def get_tagged_tracks(self, tag, limit=None, cacheable=True): + """Returns the tracks tagged by a user.""" + + params = self._get_params() + params['tag'] = tag + params['taggingtype'] = 'track' + if limit: + params['limit'] = limit + doc = self._request(self.ws_prefix + '.getpersonaltags', cacheable, + params) + return _extract_tracks(doc, self.network) + + def get_top_albums( + self, period=PERIOD_OVERALL, limit=None, cacheable=True): + """Returns the top albums played by a user. + * period: The period of time. 
Possible values: + o PERIOD_OVERALL + o PERIOD_7DAYS + o PERIOD_1MONTH + o PERIOD_3MONTHS + o PERIOD_6MONTHS + o PERIOD_12MONTHS + """ + + params = self._get_params() + params['period'] = period + if limit: + params['limit'] = limit + + doc = self._request( + self.ws_prefix + '.getTopAlbums', cacheable, params) + + return _extract_top_albums(doc, self.network) + + def get_top_artists(self, period=PERIOD_OVERALL, limit=None): + """Returns the top artists played by a user. + * period: The period of time. Possible values: + o PERIOD_OVERALL + o PERIOD_7DAYS + o PERIOD_1MONTH + o PERIOD_3MONTHS + o PERIOD_6MONTHS + o PERIOD_12MONTHS + """ + + params = self._get_params() + params['period'] = period + if limit: + params["limit"] = limit + + doc = self._request(self.ws_prefix + '.getTopArtists', True, params) + + return _extract_top_artists(doc, self.network) + + def get_top_tags(self, limit=None, cacheable=True): + """ + Returns a sequence of the top tags used by this user with their counts + as TopItem objects. + * limit: The limit of how many tags to return. + * cacheable: Whether to cache results. + """ + + params = self._get_params() + if limit: + params["limit"] = limit + + doc = self._request(self.ws_prefix + ".getTopTags", cacheable, params) + + seq = [] + for node in doc.getElementsByTagName("tag"): + seq.append(TopItem( + Tag(_extract(node, "name"), self.network), + _extract(node, "count"))) + + return seq + + def get_top_tracks( + self, period=PERIOD_OVERALL, limit=None, cacheable=True): + """Returns the top tracks played by a user. + * period: The period of time. 
Possible values: + o PERIOD_OVERALL + o PERIOD_7DAYS + o PERIOD_1MONTH + o PERIOD_3MONTHS + o PERIOD_6MONTHS + o PERIOD_12MONTHS + """ + + params = self._get_params() + params['period'] = period + if limit: + params['limit'] = limit + + return self._get_things( + "getTopTracks", "track", Track, params, cacheable) + + def compare_with_user(self, user, shared_artists_limit=None): + """ + Compare this user with another Last.fm user. + Returns a sequence: + (tasteometer_score, (shared_artist1, shared_artist2, ...)) + user: A User object or a username string/unicode object. + """ + + if isinstance(user, User): + user = user.get_name() + + params = self._get_params() + if shared_artists_limit: + params['limit'] = shared_artists_limit + params['type1'] = 'user' + params['type2'] = 'user' + params['value1'] = self.get_name() + params['value2'] = user + + doc = self._request('tasteometer.compare', False, params) + + score = _extract(doc, 'score') + + artists = doc.getElementsByTagName('artists')[0] + shared_artists_names = _extract_all(artists, 'name') + + shared_artists_seq = [] + + for name in shared_artists_names: + shared_artists_seq.append(Artist(name, self.network)) + + return (score, shared_artists_seq) + + def get_image(self): + """Returns the user's avatar.""" + + doc = self._request(self.ws_prefix + ".getInfo", True) + + return _extract(doc, "image") + + def get_url(self, domain_name=DOMAIN_ENGLISH): + """Returns the url of the user page on the network. + * domain_name: The network's language domain. Possible values: + o DOMAIN_ENGLISH + o DOMAIN_GERMAN + o DOMAIN_SPANISH + o DOMAIN_FRENCH + o DOMAIN_ITALIAN + o DOMAIN_POLISH + o DOMAIN_PORTUGUESE + o DOMAIN_SWEDISH + o DOMAIN_TURKISH + o DOMAIN_RUSSIAN + o DOMAIN_JAPANESE + o DOMAIN_CHINESE + """ + + name = _url_safe(self.get_name()) + + return self.network._get_url(domain_name, "user") % {'name': name} + + def get_library(self): + """Returns the associated Library object. 
""" + + return Library(self, self.network) + + def shout(self, message): + """ + Post a shout + """ + + params = self._get_params() + params["message"] = message + + self._request(self.ws_prefix + ".Shout", False, params) + + +class AuthenticatedUser(User): + def __init__(self, network): + User.__init__(self, "", network) + + def _get_params(self): + return {"user": self.get_name()} + + def get_name(self): + """Returns the name of the authenticated user.""" + + doc = self._request("user.getInfo", True, {"user": ""}) # hack + + self.name = _extract(doc, "name") + return self.name + + def get_recommended_events(self, limit=50, cacheable=False): + """ + Returns a sequence of Event objects + if limit==None it will return all + """ + + seq = [] + for node in _collect_nodes( + limit, self, "user.getRecommendedEvents", cacheable): + seq.append(Event(_extract(node, "id"), self.network)) + + return seq + + def get_recommended_artists(self, limit=50, cacheable=False): + """ + Returns a sequence of Artist objects + if limit==None it will return all + """ + + seq = [] + for node in _collect_nodes( + limit, self, "user.getRecommendedArtists", cacheable): + seq.append(Artist(_extract(node, "name"), self.network)) + + return seq + + +class _Search(_BaseObject): + """An abstract class. 
Use one of its derivatives.""" + + def __init__(self, ws_prefix, search_terms, network): + _BaseObject.__init__(self, network, ws_prefix) + + self._ws_prefix = ws_prefix + self.search_terms = search_terms + + self._last_page_index = 0 + + def _get_params(self): + params = {} + + for key in self.search_terms.keys(): + params[key] = self.search_terms[key] + + return params + + def get_total_result_count(self): + """Returns the total count of all the results.""" + + doc = self._request(self._ws_prefix + ".search", True) + + return _extract(doc, "opensearch:totalResults") + + def _retrieve_page(self, page_index): + """Returns the node of matches to be processed""" + + params = self._get_params() + params["page"] = str(page_index) + doc = self._request(self._ws_prefix + ".search", True, params) + + return doc.getElementsByTagName(self._ws_prefix + "matches")[0] + + def _retrieve_next_page(self): + self._last_page_index += 1 + return self._retrieve_page(self._last_page_index) + + +class AlbumSearch(_Search): + """Search for an album by name.""" + + def __init__(self, album_name, network): + + _Search.__init__(self, "album", {"album": album_name}, network) + + def get_next_page(self): + """Returns the next page of results as a sequence of Album objects.""" + + master_node = self._retrieve_next_page() + + seq = [] + for node in master_node.getElementsByTagName("album"): + seq.append(Album( + _extract(node, "artist"), + _extract(node, "name"), + self.network)) + + return seq + + +class ArtistSearch(_Search): + """Search for an artist by artist name.""" + + def __init__(self, artist_name, network): + _Search.__init__(self, "artist", {"artist": artist_name}, network) + + def get_next_page(self): + """Returns the next page of results as a sequence of Artist objects.""" + + master_node = self._retrieve_next_page() + + seq = [] + for node in master_node.getElementsByTagName("artist"): + artist = Artist(_extract(node, "name"), self.network) + artist.listener_count = 
_number(_extract(node, "listeners")) + seq.append(artist) + + return seq + + +class TagSearch(_Search): + """Search for a tag by tag name.""" + + def __init__(self, tag_name, network): + + _Search.__init__(self, "tag", {"tag": tag_name}, network) + + def get_next_page(self): + """Returns the next page of results as a sequence of Tag objects.""" + + master_node = self._retrieve_next_page() + + seq = [] + for node in master_node.getElementsByTagName("tag"): + tag = Tag(_extract(node, "name"), self.network) + tag.tag_count = _number(_extract(node, "count")) + seq.append(tag) + + return seq + + +class TrackSearch(_Search): + """ + Search for a track by track title. If you don't want to narrow the results + down by specifying the artist name, set it to empty string. + """ + + def __init__(self, artist_name, track_title, network): + + _Search.__init__( + self, + "track", + {"track": track_title, "artist": artist_name}, + network) + + def get_next_page(self): + """Returns the next page of results as a sequence of Track objects.""" + + master_node = self._retrieve_next_page() + + seq = [] + for node in master_node.getElementsByTagName("track"): + track = Track( + _extract(node, "artist"), + _extract(node, "name"), + self.network) + track.listener_count = _number(_extract(node, "listeners")) + seq.append(track) + + return seq + + +class VenueSearch(_Search): + """ + Search for a venue by its name. If you don't want to narrow the results + down by specifying a country, set it to empty string. 
+ """ + + def __init__(self, venue_name, country_name, network): + + _Search.__init__( + self, + "venue", + {"venue": venue_name, "country": country_name}, + network) + + def get_next_page(self): + """Returns the next page of results as a sequence of Track objects.""" + + master_node = self._retrieve_next_page() + + seq = [] + for node in master_node.getElementsByTagName("venue"): + seq.append(Venue(_extract(node, "id"), self.network)) + + return seq + + +class Venue(_BaseObject): + """A venue where events are held.""" + + # TODO: waiting for a venue.getInfo web service to use. + # TODO: As an intermediate use case, can pass the venue DOM element when + # using Event.get_venue() to populate the venue info, if the venue.getInfo + # API call becomes available this workaround should be removed + + id = None + info = None + name = None + location = None + url = None + + __hash__ = _BaseObject.__hash__ + + def __init__(self, netword_id, network, venue_element=None): + _BaseObject.__init__(self, network, "venue") + + self.id = _number(netword_id) + if venue_element is not None: + self.info = _extract_element_tree(venue_element) + self.name = self.info.get('name') + self.url = self.info.get('url') + self.location = self.info.get('location') + + def __repr__(self): + return "pylast.Venue(%s, %s)" % (repr(self.id), repr(self.network)) + + @_string_output + def __str__(self): + return "Venue #" + str(self.id) + + def __eq__(self, other): + return self.get_id() == other.get_id() + + def _get_params(self): + return {self.ws_prefix: self.get_id()} + + def get_id(self): + """Returns the id of the venue.""" + + return self.id + + def get_name(self): + """Returns the name of the venue.""" + + return self.name + + def get_url(self): + """Returns the URL of the venue page.""" + + return self.url + + def get_location(self): + """Returns the location of the venue (dictionary).""" + + return self.location + + def get_upcoming_events(self): + """Returns the upcoming events in this 
venue.""" + + doc = self._request(self.ws_prefix + ".getEvents", True) + + return _extract_events_from_doc(doc, self.network) + + def get_past_events(self): + """Returns the past events held in this venue.""" + + doc = self._request(self.ws_prefix + ".getEvents", True) + + return _extract_events_from_doc(doc, self.network) + + +def md5(text): + """Returns the md5 hash of a string.""" + + h = hashlib.md5() + h.update(_unicode(text).encode("utf-8")) + + return h.hexdigest() + + +def _unicode(text): + if isinstance(text, six.binary_type): + return six.text_type(text, "utf-8") + elif isinstance(text, six.text_type): + return text + else: + return six.text_type(text) + + +def _string(string): + """For Python2 routines that can only process str type.""" + if isinstance(string, str): + return string + casted = six.text_type(string) + if sys.version_info[0] == 2: + casted = casted.encode("utf-8") + return casted + + +def cleanup_nodes(doc): + """ + Remove text nodes containing only whitespace + """ + for node in doc.documentElement.childNodes: + if node.nodeType == Node.TEXT_NODE and node.nodeValue.isspace(): + doc.documentElement.removeChild(node) + return doc + + +def _collect_nodes(limit, sender, method_name, cacheable, params=None): + """ + Returns a sequence of dom.Node objects about as close to limit as possible + """ + + if not params: + params = sender._get_params() + + nodes = [] + page = 1 + end_of_pages = False + + while not end_of_pages and (not limit or (limit and len(nodes) < limit)): + params["page"] = str(page) + doc = sender._request(method_name, cacheable, params) + doc = cleanup_nodes(doc) + + main = doc.documentElement.childNodes[0] + + if main.hasAttribute("totalPages"): + total_pages = _number(main.getAttribute("totalPages")) + elif main.hasAttribute("totalpages"): + total_pages = _number(main.getAttribute("totalpages")) + else: + raise Exception("No total pages attribute") + + for node in main.childNodes: + if not node.nodeType == 
xml.dom.Node.TEXT_NODE and ( + not limit or (len(nodes) < limit)): + nodes.append(node) + + if page >= total_pages: + end_of_pages = True + + page += 1 + + return nodes + + +def _extract(node, name, index=0): + """Extracts a value from the xml string""" + + nodes = node.getElementsByTagName(name) + + if len(nodes): + if nodes[index].firstChild: + return _unescape_htmlentity(nodes[index].firstChild.data.strip()) + else: + return None + + +def _extract_element_tree(node): + """Extract an element tree into a multi-level dictionary + + NB: If any elements have text nodes as well as nested + elements this will ignore the text nodes""" + + def _recurse_build_tree(rootNode, targetDict): + """Recursively build a multi-level dict""" + + def _has_child_elements(rootNode): + """Check if an element has any nested (child) elements""" + + for node in rootNode.childNodes: + if node.nodeType == node.ELEMENT_NODE: + return True + return False + + for node in rootNode.childNodes: + if node.nodeType == node.ELEMENT_NODE: + if _has_child_elements(node): + targetDict[node.tagName] = {} + _recurse_build_tree(node, targetDict[node.tagName]) + else: + val = None if node.firstChild is None else \ + _unescape_htmlentity(node.firstChild.data.strip()) + targetDict[node.tagName] = val + return targetDict + + return _recurse_build_tree(node, {}) + + +def _extract_all(node, name, limit_count=None): + """Extracts all the values from the xml string. returning a list.""" + + seq = [] + + for i in range(0, len(node.getElementsByTagName(name))): + if len(seq) == limit_count: + break + + seq.append(_extract(node, name, i)) + + return seq + + +def _extract_top_artists(doc, network): + # TODO Maybe include the _request here too? 
+ seq = [] + for node in doc.getElementsByTagName("artist"): + name = _extract(node, "name") + playcount = _extract(node, "playcount") + + seq.append(TopItem(Artist(name, network), playcount)) + + return seq + + +def _extract_top_albums(doc, network): + # TODO Maybe include the _request here too? + seq = [] + for node in doc.getElementsByTagName("album"): + name = _extract(node, "name") + artist = _extract(node, "name", 1) + playcount = _extract(node, "playcount") + + seq.append(TopItem(Album(artist, name, network), playcount)) + + return seq + + +def _extract_artists(doc, network): + seq = [] + for node in doc.getElementsByTagName("artist"): + seq.append(Artist(_extract(node, "name"), network)) + return seq + + +def _extract_albums(doc, network): + seq = [] + for node in doc.getElementsByTagName("album"): + name = _extract(node, "name") + artist = _extract(node, "name", 1) + seq.append(Album(artist, name, network)) + return seq + + +def _extract_tracks(doc, network): + seq = [] + for node in doc.getElementsByTagName("track"): + name = _extract(node, "name") + artist = _extract(node, "name", 1) + seq.append(Track(artist, name, network)) + return seq + + +def _extract_events_from_doc(doc, network): + events = [] + for node in doc.getElementsByTagName("event"): + events.append(Event(_extract(node, "id"), network)) + return events + + +def _url_safe(text): + """Does all kinds of tricks on a text to make it safe to use in a url.""" + + return url_quote_plus(url_quote_plus(_string(text))).lower() + + +def _number(string): + """ + Extracts an int from a string. + Returns a 0 if None or an empty string was passed. 
+ """ + + if not string: + return 0 + elif string == "": + return 0 + else: + try: + return int(string) + except ValueError: + return float(string) + + +def _unescape_htmlentity(string): + + # string = _unicode(string) + + mapping = htmlentitydefs.name2codepoint + for key in mapping: + string = string.replace("&%s;" % key, unichr(mapping[key])) + + return string + + +def extract_items(topitems_or_libraryitems): + """ + Extracts a sequence of items from a sequence of TopItem or + LibraryItem objects. + """ + + seq = [] + for i in topitems_or_libraryitems: + seq.append(i.item) + + return seq + + +class ScrobblingError(Exception): + def __init__(self, message): + Exception.__init__(self) + self.message = message + + @_string_output + def __str__(self): + return self.message + + +class BannedClientError(ScrobblingError): + def __init__(self): + ScrobblingError.__init__( + self, "This version of the client has been banned") + + +class BadAuthenticationError(ScrobblingError): + def __init__(self): + ScrobblingError.__init__(self, "Bad authentication token") + + +class BadTimeError(ScrobblingError): + def __init__(self): + ScrobblingError.__init__( + self, "Time provided is not close enough to current time") + + +class BadSessionError(ScrobblingError): + def __init__(self): + ScrobblingError.__init__( + self, "Bad session id, consider re-handshaking") + + +class _ScrobblerRequest(object): + + def __init__(self, url, params, network, request_type="POST"): + + for key in params: + params[key] = str(params[key]) + + self.params = params + self.type = request_type + (self.hostname, self.subdir) = url_split_host(url[len("http:"):]) + self.network = network + + def execute(self): + """Returns a string response of this request.""" + + if _can_use_ssl_securely(): + connection = HTTPSConnection( + context=SSL_CONTEXT, + host=self.hostname + ) + else: + connection = HTTPConnection( + host=self.hostname + ) + + data = [] + for name in self.params.keys(): + value = 
url_quote_plus(self.params[name]) + data.append('='.join((name, value))) + data = "&".join(data) + + headers = { + "Content-type": "application/x-www-form-urlencoded", + "Accept-Charset": "utf-8", + "User-Agent": "pylast" + "/" + __version__, + "HOST": self.hostname + } + + if self.type == "GET": + connection.request( + "GET", self.subdir + "?" + data, headers=headers) + else: + connection.request("POST", self.subdir, data, headers) + response = _unicode(connection.getresponse().read()) + + self._check_response_for_errors(response) + + return response + + def _check_response_for_errors(self, response): + """ + When passed a string response it checks for errors, raising any + exceptions as necessary. + """ + + lines = response.split("\n") + status_line = lines[0] + + if status_line == "OK": + return + elif status_line == "BANNED": + raise BannedClientError() + elif status_line == "BADAUTH": + raise BadAuthenticationError() + elif status_line == "BADTIME": + raise BadTimeError() + elif status_line == "BADSESSION": + raise BadSessionError() + elif status_line.startswith("FAILED "): + reason = status_line[status_line.find("FAILED ") + len("FAILED "):] + raise ScrobblingError(reason) + + +class Scrobbler(object): + """A class for scrobbling tracks to Last.fm""" + + session_id = None + nowplaying_url = None + submissions_url = None + + def __init__(self, network, client_id, client_version): + self.client_id = client_id + self.client_version = client_version + self.username = network.username + self.password = network.password_hash + self.network = network + + def _do_handshake(self): + """Handshakes with the server""" + + timestamp = str(int(time.time())) + + if self.password and self.username: + token = md5(self.password + timestamp) + elif self.network.api_key and self.network.api_secret and \ + self.network.session_key: + if not self.username: + self.username = self.network.get_authenticated_user()\ + .get_name() + token = md5(self.network.api_secret + timestamp) + + 
params = { + "hs": "true", "p": "1.2.1", "c": self.client_id, + "v": self.client_version, "u": self.username, "t": timestamp, + "a": token} + + if self.network.session_key and self.network.api_key: + params["sk"] = self.network.session_key + params["api_key"] = self.network.api_key + + server = self.network.submission_server + response = _ScrobblerRequest( + server, params, self.network, "GET").execute().split("\n") + + self.session_id = response[1] + self.nowplaying_url = response[2] + self.submissions_url = response[3] + + def _get_session_id(self, new=False): + """ + Returns a handshake. If new is true, then it will be requested from + the server even if one was cached. + """ + + if not self.session_id or new: + self._do_handshake() + + return self.session_id + + def report_now_playing( + self, artist, title, album="", duration="", track_number="", + mbid=""): + + _deprecation_warning( + "DeprecationWarning: Use Network.update_now_playing(...) instead") + + params = { + "s": self._get_session_id(), "a": artist, "t": title, + "b": album, "l": duration, "n": track_number, "m": mbid} + + try: + _ScrobblerRequest( + self.nowplaying_url, params, self.network + ).execute() + except BadSessionError: + self._do_handshake() + self.report_now_playing( + artist, title, album, duration, track_number, mbid) + + def scrobble( + self, artist, title, time_started, source, mode, duration, + album="", track_number="", mbid=""): + """Scrobble a track. parameters: + artist: Artist name. + title: Track title. + time_started: UTC timestamp of when the track started playing. + source: The source of the track + SCROBBLE_SOURCE_USER: Chosen by the user + (the most common value, unless you have a reason for + choosing otherwise, use this). + SCROBBLE_SOURCE_NON_PERSONALIZED_BROADCAST: Non-personalised + broadcast (e.g. Shoutcast, BBC Radio 1). + SCROBBLE_SOURCE_PERSONALIZED_BROADCAST: Personalised + recommendation except Last.fm (e.g. Pandora, Launchcast). 
+ SCROBBLE_SOURCE_LASTFM: ast.fm (any mode). In this case, the + 5-digit recommendation_key value must be set. + SCROBBLE_SOURCE_UNKNOWN: Source unknown. + mode: The submission mode + SCROBBLE_MODE_PLAYED: The track was played. + SCROBBLE_MODE_LOVED: The user manually loved the track + (implies a listen) + SCROBBLE_MODE_SKIPPED: The track was skipped + (Only if source was Last.fm) + SCROBBLE_MODE_BANNED: The track was banned + (Only if source was Last.fm) + duration: Track duration in seconds. + album: The album name. + track_number: The track number on the album. + mbid: MusicBrainz ID. + """ + + _deprecation_warning( + "DeprecationWarning: Use Network.scrobble(...) instead") + + params = { + "s": self._get_session_id(), + "a[0]": _string(artist), + "t[0]": _string(title), + "i[0]": str(time_started), + "o[0]": source, + "r[0]": mode, + "l[0]": str(duration), + "b[0]": _string(album), + "n[0]": track_number, + "m[0]": mbid + } + + _ScrobblerRequest(self.submissions_url, params, self.network).execute() + + def scrobble_many(self, tracks): + """ + Scrobble several tracks at once. + + tracks: A sequence of a sequence of parameters for each track. + The order of parameters is the same as if passed to the + scrobble() method. + """ + + _deprecation_warning( + "DeprecationWarning: Use Network.scrobble_many(...) 
instead") + + remainder = [] + + if len(tracks) > 50: + remainder = tracks[50:] + tracks = tracks[:50] + + params = {"s": self._get_session_id()} + + i = 0 + for t in tracks: + _pad_list(t, 9, "") + params["a[%s]" % str(i)] = _string(t[0]) + params["t[%s]" % str(i)] = _string(t[1]) + params["i[%s]" % str(i)] = str(t[2]) + params["o[%s]" % str(i)] = t[3] + params["r[%s]" % str(i)] = t[4] + params["l[%s]" % str(i)] = str(t[5]) + params["b[%s]" % str(i)] = _string(t[6]) + params["n[%s]" % str(i)] = t[7] + params["m[%s]" % str(i)] = t[8] + + i += 1 + + _ScrobblerRequest(self.submissions_url, params, self.network).execute() + + if remainder: + self.scrobble_many(remainder) + +# End of file diff --git a/.install/.kodi/addons/script.module.pylast/lib/pylast/__init__.pyo b/.install/.kodi/addons/script.module.pylast/lib/pylast/__init__.pyo new file mode 100644 index 000000000..4b87fc96d Binary files /dev/null and b/.install/.kodi/addons/script.module.pylast/lib/pylast/__init__.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/__init__.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/__init__.pyo index d59cb6301..a24375070 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/__init__.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/__init__.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/__version__.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/__version__.pyo index 728c830f0..084919dc5 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/__version__.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/__version__.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/_internal_utils.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/_internal_utils.pyo index bca901e23..9d7b5a775 100644 Binary files 
a/.install/.kodi/addons/script.module.requests/lib/requests/_internal_utils.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/_internal_utils.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/adapters.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/adapters.pyo index a0510472b..6a41b7052 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/adapters.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/adapters.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/api.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/api.pyo index 176c5db89..c9391e137 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/api.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/api.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/auth.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/auth.pyo index 787c6858c..e3bd6bb05 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/auth.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/auth.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/certs.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/certs.pyo index ac8872627..2e42d7895 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/certs.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/certs.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/compat.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/compat.pyo index 561683a85..596d142ab 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/compat.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/compat.pyo differ diff --git 
a/.install/.kodi/addons/script.module.requests/lib/requests/cookies.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/cookies.pyo index 00239aa17..39af363a5 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/cookies.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/cookies.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/exceptions.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/exceptions.pyo index 4fb155c99..61ba8ffd0 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/exceptions.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/exceptions.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/hooks.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/hooks.pyo index bb79a1400..a56da536e 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/hooks.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/hooks.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/models.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/models.pyo index 75feba3b1..1d27dfb19 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/models.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/models.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/packages.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/packages.pyo index 28b3b5b47..a3dfe3fc0 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/packages.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/packages.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/sessions.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/sessions.pyo index 0bbc2021f..c71d4052a 100644 Binary files 
a/.install/.kodi/addons/script.module.requests/lib/requests/sessions.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/sessions.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/status_codes.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/status_codes.pyo index cdf0d5394..508a08b1a 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/status_codes.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/status_codes.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/structures.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/structures.pyo index 1be4f7fe2..c39a57c6d 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/structures.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/structures.pyo differ diff --git a/.install/.kodi/addons/script.module.requests/lib/requests/utils.pyo b/.install/.kodi/addons/script.module.requests/lib/requests/utils.pyo index 2e9042b1d..2e984238d 100644 Binary files a/.install/.kodi/addons/script.module.requests/lib/requests/utils.pyo and b/.install/.kodi/addons/script.module.requests/lib/requests/utils.pyo differ diff --git a/.install/.kodi/addons/script.module.simplecache/LICENSE b/.install/.kodi/addons/script.module.simplecache/LICENSE new file mode 100644 index 000000000..8dada3eda --- /dev/null +++ b/.install/.kodi/addons/script.module.simplecache/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/.install/.kodi/addons/script.module.simplecache/README.md b/.install/.kodi/addons/script.module.simplecache/README.md new file mode 100644 index 000000000..707a8de70 --- /dev/null +++ b/.install/.kodi/addons/script.module.simplecache/README.md @@ -0,0 +1,103 @@ +# script.module.simplecache + +[![Codacy Badge](https://api.codacy.com/project/badge/Grade/5e223503667f4a35a791d140f2cb6285)](https://www.codacy.com/app/m-vanderveldt/script-module-simplecache?utm_source=github.com&utm_medium=referral&utm_content=marcelveldt/script.module.simplecache&utm_campaign=badger) + +A simple object cache for Kodi addons + + +## Help needed with maintaining ! +I am very busy currently so I do not have a lot of time to work on this project or watch the forums. 
+Be aware that this is a community driven project, so feel free to submit PR's yourself to improve the code and/or help others with support on the forums etc. If you're willing to really participate in the development, please contact me so I can give you write access to the repo. I do my best to maintain the project every once in a while, when I have some spare time left. +Thanks for understanding! + + +## Usage + +You can use this python library as module within your own Kodi scripts/addons. +Just make sure to import it within your addon.xml: + +```xml + + + +``` + +Now, to use it in your Kodi addon/script, make sure to import it and you can access its methods. + +```python +import simplecache + +# instantiate the cache +_cache = simplecache.SimpleCache() + +# get data from cache +mycache = _cache.get("MyAddon.MyChunkOfData") +if mycache: + my_objects = mycache +else: + # do stuff here + my_objects = mymethod() + + # write results in cache + _cache.set( "MyAddon.MyChunkOfData", my_objects, expiration=datetime.timedelta(hours=12)) +``` + +The above example will check the cache for the key "MyAddon.MyChunkOfData". If there is any data (and the cache is not expired) it will be returned as the original object. + +If the cache is empty, you perform the usual stuff to get the data and save that to the cache + +--------------------------------------------------------------------------- + +## Available methods + +### get(endpoint, checksum="", json=False) +``` + Returns the data from the cache for the specified endpoint. Will return None if there is no cache. + + parameters: + endpoint --> Your unique reference/key for the cache object. TIP: To prevent clashes with other addons, prefix with your addon ID. + checksum --> Optional argument to check for a checksum in the file (Will only work if you store the checksum with the set method). Can be any python object which can be serialized with eval. + json --> Optional argument. Default is False. 
For JSON data it is recommended to switch it to True to avoid Memomy Error exceptions or other issues. If you set the global "data_is_json" bool to True, it will always handle your data as JSON. + + + Example: _cache.get("MyAddon.MyChunkOfData", checksum=len(myvideos)) + + This example will return the data in the cache but only if the length of the list myvideos is the same as whatever is stored as checksum in the cache. + +``` + +### set(endpoint, data, checksum="", expiration=timedelta(days=30), json=False) +``` + Stores the data in the cache for the specified endpoint. + + parameters: + endpoint --> Your unique reference/key for the cache object. TIP: To prevent clashes with other addons, prefix with your addon ID. + data --> Your objectdata. Can be any python object which can be serialized with eval. + checksum --> Optional argument to store as checksum in the file. Can be any python object which can be serialized with eval. + expiration --> Optional argument to specify the amount of time the data may be cached as python timedelta object. Defaults to 30 days if ommitted. + json --> Optional argument. Default is False. For JSON data it is recommended to switch it to True to avoid Memomy Error exceptions or other issues. If you set the global "data_is_json" bool to True, it will always handle your data as JSON. + + Example: _cache.set("MyAddon.MyGreatChunkOfData", my_objects, checksum=len(myvideos), expiration=timedelta(hours=1)) + + This example will store the data in the cache which will expire after 1 hours. Additionally a checksum is stored in the cache object. + +``` + +## Notes + +1) By default objects will be stored both in memory and on disk, it is however possible to override that: +``` + _cache.enable_mem_cache = False +``` +In that case, objects will only be stored on disk (database) + + +2) Cache objects are auto cleaned from memory after 2 hours to prevent unused objects loaded in memory. 
+ + +3) Cache objects on disk are stored in a self-maintaining sqllite database. Expired objects will be auto cleaned from the database. + +4) If your data is only JSON you can set a global bool to handle all input/ouput requests as JSON. This is recommended to avoid problems and issues on slower devices. +``` + _cache.data_is_json = True +``` diff --git a/.install/.kodi/addons/script.module.simplecache/addon.xml b/.install/.kodi/addons/script.module.simplecache/addon.xml new file mode 100644 index 000000000..8ec846451 --- /dev/null +++ b/.install/.kodi/addons/script.module.simplecache/addon.xml @@ -0,0 +1,18 @@ + + + + + + + + Provides a simple file- and memory based cache for Kodi addons + Provides a simple file- and memory based cache for Kodi addons. Based on the original work of Marcelveldt. + all + Apache 2.0 + + https://github.com/sualfred/script.module.simplecache + + resources/icon.png + + + diff --git a/.install/.kodi/addons/script.module.simplecache/lib/simplecache.py b/.install/.kodi/addons/script.module.simplecache/lib/simplecache.py new file mode 100644 index 000000000..0f52c553a --- /dev/null +++ b/.install/.kodi/addons/script.module.simplecache/lib/simplecache.py @@ -0,0 +1,309 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +'''provides a simple stateless caching system for Kodi addons and plugins''' + +import sys +import xbmcvfs +import xbmcgui +import xbmc +import xbmcaddon +import datetime +import time +import sqlite3 +import json +from functools import reduce + +ADDON_ID = "script.module.simplecache" + +class SimpleCache(object): + '''simple stateless caching system for Kodi''' + enable_mem_cache = True + data_is_json = False + global_checksum = None + _exit = False + _auto_clean_interval = datetime.timedelta(hours=4) + _win = None + _busy_tasks = [] + _database = None + + def __init__(self): + '''Initialize our caching class''' + self._win = xbmcgui.Window(10000) + self._monitor = xbmc.Monitor() + self.check_cleanup() + 
self._log_msg("Initialized") + + def close(self): + '''tell any tasks to stop immediately (as we can be called multithreaded) and cleanup objects''' + self._exit = True + # wait for all tasks to complete + while self._busy_tasks and not self._monitor.abortRequested(): + xbmc.sleep(25) + del self._win + del self._monitor + self._log_msg("Closed") + + def __del__(self): + '''make sure close is called''' + if not self._exit: + self.close() + + def get(self, endpoint, checksum="", json_data=False): + ''' + get object from cache and return the results + endpoint: the (unique) name of the cache object as reference + checkum: optional argument to check if the checksum in the cacheobject matches the checkum provided + ''' + checksum = self._get_checksum(checksum) + cur_time = self._get_timestamp(datetime.datetime.now()) + result = None + # 1: try memory cache first + if self.enable_mem_cache: + result = self._get_mem_cache(endpoint, checksum, cur_time, json_data) + + # 2: fallback to _database cache + if result is None: + result = self._get_db_cache(endpoint, checksum, cur_time, json_data) + + return result + + def set(self, endpoint, data, checksum="", expiration=datetime.timedelta(days=30), json_data=False): + ''' + set data in cache + ''' + task_name = "set.%s" % endpoint + self._busy_tasks.append(task_name) + checksum = self._get_checksum(checksum) + expires = self._get_timestamp(datetime.datetime.now() + expiration) + + # memory cache: write to window property + if self.enable_mem_cache and not self._exit: + self._set_mem_cache(endpoint, checksum, expires, data, json_data) + + # db cache + if not self._exit: + self._set_db_cache(endpoint, checksum, expires, data, json_data) + + # remove this task from list + self._busy_tasks.remove(task_name) + + def check_cleanup(self): + '''check if cleanup is needed - public method, may be called by calling addon''' + cur_time = datetime.datetime.now() + lastexecuted = self._win.getProperty("simplecache.clean.lastexecuted") + if 
not lastexecuted: + self._win.setProperty("simplecache.clean.lastexecuted", repr(cur_time)) + elif (eval(lastexecuted) + self._auto_clean_interval) < cur_time: + # cleanup needed... + self._do_cleanup() + + def _get_mem_cache(self, endpoint, checksum, cur_time, json_data): + ''' + get cache data from memory cache + we use window properties because we need to be stateless + ''' + result = None + + cachedata = self._win.getProperty(endpoint.encode("utf-8")) + + if cachedata: + if json_data or self.data_is_json: + cachedata = json.loads(cachedata) + else: + cachedata = eval(cachedata) + if cachedata[0] > cur_time: + if not checksum or checksum == cachedata[2]: + result = cachedata[1] + return result + + def _set_mem_cache(self, endpoint, checksum, expires, data, json_data): + ''' + window property cache as alternative for memory cache + usefull for (stateless) plugins + ''' + cachedata = (expires, data, checksum) + if json_data or self.data_is_json: + cachedata_str = json.dumps(cachedata).encode("utf-8") + else: + cachedata_str = repr(cachedata).encode("utf-8") + + self._win.setProperty(endpoint.encode("utf-8"), cachedata_str) + + def _get_db_cache(self, endpoint, checksum, cur_time, json_data): + '''get cache data from sqllite _database''' + result = None + query = "SELECT expires, data, checksum FROM simplecache WHERE id = ?" 
+ cache_data = self._execute_sql(query, (endpoint,)) + if cache_data: + cache_data = cache_data.fetchone() + if cache_data and cache_data[0] > cur_time: + if not checksum or cache_data[2] == checksum: + if json_data or self.data_is_json: + result = json.loads(cache_data[1]) + else: + result = eval(cache_data[1]) + # also set result in memory cache for further access + if self.enable_mem_cache: + self._set_mem_cache(endpoint, checksum, cache_data[0], result, json_data) + return result + + def _set_db_cache(self, endpoint, checksum, expires, data, json_data): + ''' store cache data in _database ''' + query = "INSERT OR REPLACE INTO simplecache( id, expires, data, checksum) VALUES (?, ?, ?, ?)" + if json_data or self.data_is_json: + data = json.dumps(data) + else: + data = repr(data) + self._execute_sql(query, (endpoint, expires, data, checksum)) + + def _do_cleanup(self): + '''perform cleanup task''' + if self._exit or self._monitor.abortRequested(): + return + self._busy_tasks.append(__name__) + cur_time = datetime.datetime.now() + cur_timestamp = self._get_timestamp(cur_time) + self._log_msg("Running cleanup...") + if self._win.getProperty("simplecachecleanbusy"): + return + self._win.setProperty("simplecachecleanbusy", "busy") + + query = "SELECT id, expires FROM simplecache" + for cache_data in self._execute_sql(query).fetchall(): + cache_id = cache_data[0] + cache_expires = cache_data[1] + if self._exit or self._monitor.abortRequested(): + return + # always cleanup all memory objects on each interval + self._win.clearProperty(cache_id.encode("utf-8")) + # clean up db cache object only if expired + if cache_expires < cur_timestamp: + query = 'DELETE FROM simplecache WHERE id = ?' 
+ self._execute_sql(query, (cache_id,)) + self._log_msg("delete from db %s" % cache_id) + + # compact db + self._execute_sql("VACUUM") + + # remove task from list + self._busy_tasks.remove(__name__) + self._win.setProperty("simplecache.clean.lastexecuted", repr(cur_time)) + self._win.clearProperty("simplecachecleanbusy") + self._log_msg("Auto cleanup done") + + def _get_database(self): + '''get reference to our sqllite _database - performs basic integrity check''' + addon = xbmcaddon.Addon(ADDON_ID) + dbpath = addon.getAddonInfo('profile') + dbfile = xbmc.translatePath("%s/simplecache.db" % dbpath).decode('utf-8') + + if not xbmcvfs.exists(dbpath): + xbmcvfs.mkdirs(dbpath) + del addon + try: + connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None) + connection.execute('SELECT * FROM simplecache LIMIT 1') + return connection + except Exception as error: + # our _database is corrupt or doesn't exist yet, we simply try to recreate it + if xbmcvfs.exists(dbfile): + xbmcvfs.delete(dbfile) + try: + connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None) + connection.execute( + """CREATE TABLE IF NOT EXISTS simplecache( + id TEXT UNIQUE, expires INTEGER, data TEXT, checksum INTEGER)""") + return connection + except Exception as error: + self._log_msg("Exception while initializing _database: %s" % str(error), xbmc.LOGWARNING) + self.close() + return None + + def _execute_sql(self, query, data=None): + '''little wrapper around execute and executemany to just retry a db command if db is locked''' + retries = 0 + result = None + error = None + # always use new db object because we need to be sure that data is available for other simplecache instances + with self._get_database() as _database: + while not retries == 10 and not self._monitor.abortRequested(): + if self._exit: + return None + try: + if isinstance(data, list): + result = _database.executemany(query, data) + elif data: + result = _database.execute(query, data) + else: + result = 
_database.execute(query) + return result + except sqlite3.OperationalError as error: + if "_database is locked" in error: + self._log_msg("retrying DB commit...") + retries += 1 + self._monitor.waitForAbort(0.5) + else: + break + except Exception as error: + break + self._log_msg("_database ERROR ! -- %s" % str(error), xbmc.LOGWARNING) + return None + + @staticmethod + def _log_msg(msg, loglevel=xbmc.LOGDEBUG): + '''helper to send a message to the kodi log''' + if isinstance(msg, unicode): + msg = msg.encode('utf-8') + + xbmc.log("Skin Helper Simplecache --> %s" % msg, level=loglevel) + + @staticmethod + def _get_timestamp(date_time): + '''Converts a datetime object to unix timestamp''' + return int(time.mktime(date_time.timetuple())) + + def _get_checksum(self, stringinput): + '''get int checksum from string''' + if not stringinput and not self.global_checksum: + return 0 + if self.global_checksum: + stringinput = "%s-%s" %(self.global_checksum, stringinput) + else: + stringinput = str(stringinput) + return reduce(lambda x, y: x + y, map(ord, stringinput)) + + +def use_cache(cache_days=14): + ''' + wrapper around our simple cache to use as decorator + Usage: define an instance of SimpleCache with name "cache" (self.cache) in your class + Any method that needs caching just add @use_cache as decorator + NOTE: use unnamed arguments for calling the method and named arguments for optional settings + ''' + def decorator(func): + '''our decorator''' + def decorated(*args, **kwargs): + '''process the original method and apply caching of the results''' + method_class = args[0] + method_class_name = method_class.__class__.__name__ + cache_str = "%s.%s" % (method_class_name, func.__name__) + # cache identifier is based on positional args only + # named args are considered optional and ignored + for item in args[1:]: + cache_str += u".%s" % item + cache_str = cache_str.lower() + cachedata = method_class.cache.get(cache_str) + global_cache_ignore = False + try: + 
global_cache_ignore = method_class.ignore_cache + except Exception: + pass + if cachedata is not None and not kwargs.get("ignore_cache", False) and not global_cache_ignore: + return cachedata + else: + result = func(*args, **kwargs) + method_class.cache.set(cache_str, result, expiration=datetime.timedelta(days=cache_days)) + return result + return decorated + return decorator diff --git a/.install/.kodi/addons/script.module.simplecache/lib/simplecache.pyo b/.install/.kodi/addons/script.module.simplecache/lib/simplecache.pyo new file mode 100644 index 000000000..a4db6a417 Binary files /dev/null and b/.install/.kodi/addons/script.module.simplecache/lib/simplecache.pyo differ diff --git a/.install/.kodi/addons/script.module.simplecache/resources/icon.png b/.install/.kodi/addons/script.module.simplecache/resources/icon.png new file mode 100644 index 000000000..ca4c53adb Binary files /dev/null and b/.install/.kodi/addons/script.module.simplecache/resources/icon.png differ diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/__init__.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/__init__.pyo index a7890129e..389ba142d 100644 Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/__init__.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/__init__.pyo differ diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/compat.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/compat.pyo index 1bbff8706..594aca458 100644 Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/compat.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/compat.pyo differ diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/decoder.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/decoder.pyo index 1f45c0473..6aa04bfd8 100644 Binary files 
a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/decoder.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/decoder.pyo differ diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/encoder.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/encoder.pyo index a0d8b3bc9..a605c8336 100644 Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/encoder.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/encoder.pyo differ diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/errors.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/errors.pyo index 6bac340f3..bbdbd9f38 100644 Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/errors.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/errors.pyo differ diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/raw_json.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/raw_json.pyo index f2a2b802c..818a6c8c8 100644 Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/raw_json.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/raw_json.pyo differ diff --git a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/scanner.pyo b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/scanner.pyo index 6011f348f..721ba5778 100644 Binary files a/.install/.kodi/addons/script.module.simplejson/lib/simplejson/scanner.pyo and b/.install/.kodi/addons/script.module.simplejson/lib/simplejson/scanner.pyo differ diff --git a/.install/.kodi/addons/script.module.six/LICENSE b/.install/.kodi/addons/script.module.six/LICENSE new file mode 100644 index 000000000..d76e02426 --- /dev/null +++ b/.install/.kodi/addons/script.module.six/LICENSE @@ -0,0 +1,18 @@ +Copyright (c) 2010-2014 Benjamin Peterson + +Permission is hereby granted, free of charge, to any 
person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/.install/.kodi/addons/script.module.six/README b/.install/.kodi/addons/script.module.six/README new file mode 100644 index 000000000..4de73fa95 --- /dev/null +++ b/.install/.kodi/addons/script.module.six/README @@ -0,0 +1,16 @@ +Six is a Python 2 and 3 compatibility library. It provides utility functions +for smoothing over the differences between the Python versions with the goal of +writing Python code that is compatible on both Python versions. See the +documentation for more information on what is provided. + +Six supports every Python version since 2.5. It is contained in only one Python +file, so it can be easily copied into your project. (The copyright and license +notice must be retained.) + +Online documentation is at http://pythonhosted.org/six/. + +Bugs can be reported to http://bitbucket.org/gutworth/six. The code can also be +found there. 
+ +For questions about six or porting in general, email the python-porting mailing +list: http://mail.python.org/mailman/listinfo/python-porting diff --git a/.install/.kodi/addons/script.module.six/addon.xml b/.install/.kodi/addons/script.module.six/addon.xml new file mode 100644 index 000000000..0a1f6b28c --- /dev/null +++ b/.install/.kodi/addons/script.module.six/addon.xml @@ -0,0 +1,20 @@ + + + + + + + + Python 2 and 3 compatibility utilities. + Six is a Python 2 and 3 compatibility library. It provides utility functions for smoothing over the differences between the Python versions with the goal of writing Python code that is compatible on both Python versions. See the documentation for more information on what is provided. + all + MIT + https://pypi.org/project/six/ + https://pypi.org/project/six/ + + diff --git a/.install/.kodi/addons/script.module.six/icon.png b/.install/.kodi/addons/script.module.six/icon.png new file mode 100644 index 000000000..ca4c53adb Binary files /dev/null and b/.install/.kodi/addons/script.module.six/icon.png differ diff --git a/.install/.kodi/addons/script.module.six/lib/six.py b/.install/.kodi/addons/script.module.six/lib/six.py new file mode 100644 index 000000000..357e624ab --- /dev/null +++ b/.install/.kodi/addons/script.module.six/lib/six.py @@ -0,0 +1,963 @@ +# Copyright (c) 2010-2019 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Utilities for writing code that runs on Python 2 and 3""" + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.13.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. 
+ try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." 
+ fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + 
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", 
"pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = 
_urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + 
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", 
"urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + 
except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = 
operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + del io + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, 
_assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + try: + if from_value is None: + raise value + raise value from from_value + finally: + value = None +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. 
+ class metaclass(type): + + def __new__(cls, name, this_bases, d): + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). + resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + if hasattr(cls, '__qualname__'): + orig_vars['__qualname__'] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def ensure_binary(s, encoding='utf-8', errors='strict'): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, text_type): + return s.encode(encoding, errors) + elif isinstance(s, binary_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """Coerce *s* to `str`. 
+ + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + if PY2 and isinstance(s, text_type): + s = s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + s = s.decode(encoding, errors) + return s + + +def ensure_text(s, encoding='utf-8', errors='strict'): + """Coerce *s* to six.text_type. + + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) 
+if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/.install/.kodi/addons/script.module.six/lib/six.pyo b/.install/.kodi/addons/script.module.six/lib/six.pyo new file mode 100644 index 000000000..dbab29d1e Binary files /dev/null and b/.install/.kodi/addons/script.module.six/lib/six.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/__init__.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/__init__.pyo index e92d67bc8..94abd98b4 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/__init__.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/__init__.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/_collections.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/_collections.pyo index 58d777ebb..ad19660e8 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/_collections.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/_collections.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connection.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connection.pyo index af3d98687..e43d14fd7 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connection.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connection.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connectionpool.pyo 
b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connectionpool.pyo index 47e025cee..e563b5f36 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connectionpool.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/connectionpool.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/__init__.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/__init__.pyo index e93ba009d..3fcd56e76 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/__init__.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/__init__.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/_appengine_environ.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/_appengine_environ.pyo index a4ae3b65f..f55ddb6fd 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/_appengine_environ.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/_appengine_environ.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/pyopenssl.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/pyopenssl.pyo index 733cbb6fa..98b67d78d 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/pyopenssl.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/pyopenssl.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/socks.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/socks.pyo index 91665b44d..04afcffae 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/socks.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/contrib/socks.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/exceptions.pyo 
b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/exceptions.pyo index 26d55490a..32176138e 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/exceptions.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/exceptions.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/fields.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/fields.pyo index 487f69552..f5750c56f 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/fields.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/fields.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/filepost.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/filepost.pyo index fa5ff6757..ea3cf7203 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/filepost.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/filepost.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/__init__.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/__init__.pyo index 803341dfa..b8fcd28f0 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/__init__.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/__init__.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/six.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/six.pyo index d1c33f13a..5a846fb80 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/six.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/six.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/__init__.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/__init__.pyo index da84627a6..911ac7393 100644 Binary files 
a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/__init__.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/__init__.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/_implementation.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/_implementation.pyo index d49a0278e..95e3cdcd4 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/_implementation.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/packages/ssl_match_hostname/_implementation.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/poolmanager.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/poolmanager.pyo index 10c44bd92..d1bc8fa4d 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/poolmanager.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/poolmanager.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/request.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/request.pyo index 0a7dbbb0d..8811f7159 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/request.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/request.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/response.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/response.pyo index b4418f796..def744c6d 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/response.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/response.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/__init__.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/__init__.pyo index b624dcde5..2108e8c80 100644 Binary files 
a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/__init__.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/__init__.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/connection.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/connection.pyo index e4a4d7dab..16435e88e 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/connection.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/connection.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/queue.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/queue.pyo index 77b188ba1..9c4b06d73 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/queue.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/queue.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/request.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/request.pyo index 3a024d562..c1fd88a72 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/request.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/request.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/response.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/response.pyo index e4bf4f53a..b1fb4cc24 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/response.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/response.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/retry.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/retry.pyo index aed45dd27..939983fc0 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/retry.pyo and 
b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/retry.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/ssl_.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/ssl_.pyo index 8835e2654..96fe505ea 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/ssl_.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/ssl_.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/timeout.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/timeout.pyo index aece5b09b..d4619d363 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/timeout.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/timeout.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/url.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/url.pyo index 18521ae15..3196837c4 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/url.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/url.pyo differ diff --git a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/wait.pyo b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/wait.pyo index 8d1f9650f..fc4de4109 100644 Binary files a/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/wait.pyo and b/.install/.kodi/addons/script.module.urllib3/lib/urllib3/util/wait.pyo differ diff --git a/.install/.kodi/addons/script.module.xbmcswift2/addon.xml b/.install/.kodi/addons/script.module.xbmcswift2/addon.xml index 12e14a2c0..e66e3d59c 100644 --- a/.install/.kodi/addons/script.module.xbmcswift2/addon.xml +++ b/.install/.kodi/addons/script.module.xbmcswift2/addon.xml @@ -1,4 +1,4 @@ - + diff --git a/.install/.kodi/addons/script.module.xbmcswift2/changelog.txt b/.install/.kodi/addons/script.module.xbmcswift2/changelog.txt index 9fa934979..d11c2cb13 100644 --- 
a/.install/.kodi/addons/script.module.xbmcswift2/changelog.txt +++ b/.install/.kodi/addons/script.module.xbmcswift2/changelog.txt @@ -1,5 +1,8 @@ CHANGES ======= +Version 13.0.3 (15/12/2020) +- Remove offscreen support (only supported in leia++) + Version 13.0.0 (12/4/2020) - Fix args - py2 only version diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/__init__.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/__init__.pyo new file mode 100644 index 000000000..1c8a3839e Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/__init__.pyo differ diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/common.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/common.pyo new file mode 100644 index 000000000..6d9bda90a Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/common.pyo differ diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/constants.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/constants.pyo new file mode 100644 index 000000000..762f1f06c Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/constants.pyo differ diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.py b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.py index c7c8ad898..6ffe5291c 100644 --- a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.py +++ b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.py @@ -17,15 +17,14 @@ class ListItem(object): of any set properties that xbmcgui doesn't expose getters for. ''' def __init__(self, label=None, label2=None, icon=None, thumbnail=None, - path=None, fanart=None, offscreen=False): + path=None, fanart=None): '''Defaults are an emtpy string since xbmcgui.ListItem will not accept None. 
''' kwargs = { 'label': label, 'label2': label2, - 'path': path, - 'offscreen': offscreen + 'path': path } #kwargs = dict((key, val) for key, val in locals().items() if val is #not None and key != 'self') @@ -194,14 +193,13 @@ class ListItem(object): def from_dict(cls, label=None, label2=None, icon=None, thumbnail=None, path=None, selected=None, info=None, properties=None, context_menu=None, replace_context_menu=False, - is_playable=None, info_type='video', stream_info=None, fanart=None, - offscreen=False): + is_playable=None, info_type='video', stream_info=None, fanart=None): '''A ListItem constructor for setting a lot of properties not available in the regular __init__ method. Useful to collect all the properties in a dict and then use the **dct to call this method. ''' - listitem = cls(label, label2, icon, thumbnail, path, fanart, offscreen) + listitem = cls(label, label2, icon, thumbnail, path, fanart) if selected is not None: listitem.select(selected) diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.pyo new file mode 100644 index 000000000..2c599e83a Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/listitem.pyo differ diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/logger.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/logger.pyo new file mode 100644 index 000000000..082e6bdf4 Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/logger.pyo differ diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/module.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/module.pyo new file mode 100644 index 000000000..a9993a977 Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/module.pyo differ diff --git 
a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/plugin.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/plugin.pyo new file mode 100644 index 000000000..5ec3d7fe5 Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/plugin.pyo differ diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/request.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/request.pyo new file mode 100644 index 000000000..89a13380c Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/request.pyo differ diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/storage.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/storage.pyo new file mode 100644 index 000000000..2e242ac15 Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/storage.pyo differ diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/urls.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/urls.pyo new file mode 100644 index 000000000..9b440cb57 Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/urls.pyo differ diff --git a/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/xbmcmixin.pyo b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/xbmcmixin.pyo new file mode 100644 index 000000000..4552ea6c0 Binary files /dev/null and b/.install/.kodi/addons/script.module.xbmcswift2/lib/xbmcswift2/xbmcmixin.pyo differ diff --git a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/__init__.pyo b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/__init__.pyo index b3a6f562a..781ab747b 100644 Binary files a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/__init__.pyo and 
b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/__init__.pyo differ diff --git a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/common.pyo b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/common.pyo index 54d59c7e4..22df0f7fb 100644 Binary files a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/common.pyo and b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/common.pyo differ diff --git a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/json_interface.pyo b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/json_interface.pyo index b135f059f..b650d520f 100644 Binary files a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/json_interface.pyo and b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/json_interface.pyo differ diff --git a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/service.pyo b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/service.pyo index 0aeaf75d6..a4b1437e1 100644 Binary files a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/service.pyo and b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/service.pyo differ diff --git a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/versions.pyo b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/versions.pyo index 98527e32e..dccd111c0 100644 Binary files a/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/versions.pyo and b/.install/.kodi/addons/service.xbmc.versioncheck/resources/lib/version_check/versions.pyo differ diff --git a/.install/.kodi/temp/kodi.log b/.install/.kodi/temp/kodi.log deleted file mode 100644 index 9ece62d6d..000000000 --- 
a/.install/.kodi/temp/kodi.log +++ /dev/null @@ -1,341 +0,0 @@ -2020-12-13 14:19:18.522 T:2984189216 NOTICE: ----------------------------------------------------------------------- -2020-12-13 14:19:18.522 T:2984189216 NOTICE: Starting Kodi (18.9 (18.9.0) Git:20201027-6d9d93e-dirty). Platform: Linux ARM 32-bit -2020-12-13 14:19:18.522 T:2984189216 NOTICE: Using Debug Kodi x32 build -2020-12-13 14:19:18.522 T:2984189216 NOTICE: Kodi compiled 2020-11-18 by GCC 8.3.0 for Linux ARM 32-bit version 5.4.38 (328742) -2020-12-13 14:19:18.523 T:2984189216 NOTICE: Running on XBian 1.0 (knockout), kernel: Linux ARM 32-bit version 5.4.75+ -2020-12-13 14:19:18.523 T:2984189216 NOTICE: FFmpeg version/source: 4.0.4-Kodi -2020-12-13 14:19:18.523 T:2984189216 NOTICE: Host CPU: ARMv7 Processor rev 3 (v7l), 4 cores available -2020-12-13 14:19:18.523 T:2984189216 NOTICE: ARM Features: Neon enabled -2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://xbmc/ is mapped to: /usr/local/share/kodi -2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://xbmcbin/ is mapped to: /usr/local/lib/kodi -2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://xbmcbinaddons/ is mapped to: /usr/local/lib/kodi/addons -2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://masterprofile/ is mapped to: /home/xbian/.kodi/userdata -2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://envhome/ is mapped to: /home/xbian -2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://home/ is mapped to: /home/xbian/.kodi -2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://temp/ is mapped to: /home/xbian/.kodi/temp -2020-12-13 14:19:18.523 T:2984189216 NOTICE: special://logpath/ is mapped to: /home/xbian/.kodi/temp -2020-12-13 14:19:18.523 T:2984189216 NOTICE: The executable running is: /usr/local/lib/kodi/kodi-gbm -2020-12-13 14:19:18.523 T:2984189216 NOTICE: Local hostname: cuvelima -2020-12-13 14:19:18.523 T:2984189216 NOTICE: Log File is located: /home/xbian/.kodi/temp/kodi.log -2020-12-13 14:19:18.523 
T:2984189216 NOTICE: ----------------------------------------------------------------------- -2020-12-13 14:19:18.524 T:2984189216 INFO: loading settings -2020-12-13 14:19:18.526 T:2984189216 NOTICE: special://profile/ is mapped to: special://masterprofile/ -2020-12-13 14:19:18.549 T:2984189216 DEBUG: CSkinSettings: no tag found -2020-12-13 14:19:18.549 T:2984189216 NOTICE: No settings file to load (special://xbmc/system/advancedsettings.xml) -2020-12-13 14:19:18.549 T:2984189216 NOTICE: No settings file to load (special://masterprofile/advancedsettings.xml) -2020-12-13 14:19:18.549 T:2984189216 NOTICE: Default Video Player: VideoPlayer -2020-12-13 14:19:18.549 T:2984189216 NOTICE: Default Audio Player: paplayer -2020-12-13 14:19:18.549 T:2984189216 NOTICE: Disabled debug logging due to GUI setting. Level 0. -2020-12-13 14:19:18.549 T:2984189216 NOTICE: Log level changed to "LOG_LEVEL_NORMAL" -2020-12-13 14:19:18.550 T:2984189216 NOTICE: CMediaSourceSettings: loading media sources from special://masterprofile/sources.xml -2020-12-13 14:19:18.679 T:2984189216 NOTICE: PulseAudio: Server not running -2020-12-13 14:19:18.714 T:2984189216 NOTICE: Running database version Addons27 -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.2sf v2.0.3 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.asap v2.0.2 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.dumb v2.0.2 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.fluidsynth v2.1.1 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.gme v2.0.3 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.gsf v2.0.3 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.modplug v2.0.3 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.ncsf v2.0.3 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.nosefart v2.0.2 installed 
-2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.openmpt v2.0.4 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.organya v1.2.1 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.qsf v2.0.2 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.sidplay v1.2.2 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.snesapu v2.0.2 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.ssf v2.0.2 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.stsound v2.0.2 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.timidity v2.0.5 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.upse v2.0.2 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.usf v2.0.2 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.vgmstream v1.1.5 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audiodecoder.wsr v2.0.2 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audioencoder.flac v2.0.6 installed -2020-12-13 14:19:18.885 T:2984189216 NOTICE: ADDON: audioencoder.kodi.builtin.aac v1.0.0 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: audioencoder.kodi.builtin.wma v1.0.0 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: audioencoder.lame v2.0.4 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: audioencoder.vorbis v2.0.4 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: audioencoder.wav v2.0.3 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: game.controller.default v1.0.8 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: game.controller.snes v1.0.8 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: imagedecoder.heif v1.1.0 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: imagedecoder.mpo v1.1.2 installed 
-2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: imagedecoder.raw v2.1.2 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: inputstream.adaptive v2.4.6 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: inputstream.rtmp v2.0.9 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.global.audioengine v1.0.1 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.global.filesystem v1.0.2 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.global.general v1.0.3 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.global.gui v5.12.0 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.global.main v1.0.14 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.global.network v1.0.0 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.audiodecoder v2.0.0 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.audioencoder v2.0.0 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.game v1.1.0 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.imagedecoder v2.0.0 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.inputstream v2.0.8 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.peripheral v1.3.7 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.pvr v5.10.3 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.screensaver v2.0.0 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.vfs v2.0.0 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.videocodec v1.0.1 installed -2020-12-13 14:19:18.886 T:2984189216 NOTICE: ADDON: kodi.binary.instance.visualization v2.0.1 installed -2020-12-13 14:19:18.887 
T:2984189216 NOTICE: ADDON: kodi.resource v1.0.0 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.album.universal v3.1.3 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.artists.universal v4.3.3 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.common.allmusic.com v3.2.2 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.common.fanart.tv v3.6.3 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.common.imdb.com v3.1.6 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.common.musicbrainz.org v2.2.4 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.common.theaudiodb.com v2.0.3 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.common.themoviedb.org v3.2.12 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.local v1.0.0 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.themoviedb.org v5.2.5 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: metadata.tvshows.themoviedb.org v3.5.11 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: peripheral.joystick v1.4.9 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: peripheral.xarcade v1.1.0 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: plugin.video.arteplussept v1.0.2 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: plugin.video.francetv v2.0.0 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: plugin.video.vstream v0.8.3 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: plugin.xbianconfig v18.0.1 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.argustv v3.5.6 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.dvblink v4.7.3 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.dvbviewer v3.7.13 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.filmon v2.4.6 
installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.hdhomerun v3.5.1 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.hts v4.4.21 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.iptvarchive v3.7.2 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.iptvsimple v3.9.8 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.mediaportal.tvserver v3.5.19 installed -2020-12-13 14:19:18.887 T:2984189216 NOTICE: ADDON: pvr.mythtv v5.10.19 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.nextpvr v3.3.21 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.njoy v3.4.3 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.octonet v0.7.1 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.pctv v2.4.7 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.stalker v3.4.10 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.teleboy v18.2.3 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.vbox v4.7.0 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.vdr.vnsi v3.6.4 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.vuplus v3.28.9 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.wmc v2.4.6 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: pvr.zattoo v18.1.21 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: repository.superrepo.org.gotham.all v0.5.206 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: repository.vstream v0.0.4 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: repository.xbmc.org v3.1.6 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: resource.images.weathericons.default v1.1.8 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: resource.language.en_gb v2.0.1 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: resource.language.fr_fr v9.0.24 installed 
-2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: resource.uisounds.kodi v1.0.0 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: screensaver.shadertoy v2.0.0 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: screensaver.xbmc.builtin.black v1.0.33 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: screensaver.xbmc.builtin.dim v1.0.59 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.beautifulsoup4 v4.6.2 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.certifi v2019.9.11 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.chardet v3.0.4 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.idna v2.8 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.inputstreamhelper v0.5.1 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.pil v1.1.7 installed -2020-12-13 14:19:18.888 T:2984189216 NOTICE: ADDON: script.module.pycryptodome v3.4.3 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: script.module.requests v2.22.0 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: script.module.simplejson v3.16.1 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: script.module.urllib3 v1.25.6 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: script.module.xbmcswift2 v13.0.2 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: script.service.xbian.upstart-bridge v2.0.2 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: service.xbmc.versioncheck v0.5.12 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: skin.estouchy v2.0.28 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: skin.estuary v2.0.27 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: vfs.libarchive v1.0.7 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: vfs.rar v2.3.2 installed -2020-12-13 14:19:18.889 T:2984189216 
NOTICE: ADDON: vfs.sacd v1.0.4 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: vfs.sftp v1.0.6 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: visualization.shadertoy v1.2.4 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: visualization.spectrum v3.0.4 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: visualization.waveform v3.1.2 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: webinterface.default v18.x-2.4.6 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.addon v18.9 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.core v0.1.0 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.gui v5.14.0 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.json v10.3.0 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.metadata v2.1.0 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.python v2.26.0 installed -2020-12-13 14:19:18.889 T:2984189216 NOTICE: ADDON: xbmc.webinterface v1.0.0 installed -2020-12-13 14:19:19.221 T:2984189216 ERROR: DBus error: org.freedesktop.DBus.Error.InvalidArgs - No such property “CanSuspend” -2020-12-13 14:19:19.222 T:2984189216 ERROR: DBus error: org.freedesktop.DBus.Error.InvalidArgs - No such property “CanHibernate” -2020-12-13 14:19:19.248 T:2952786128 NOTICE: Found 2 Lists of Devices -2020-12-13 14:19:19.248 T:2952786128 NOTICE: Enumerated ALSA devices: -2020-12-13 14:19:19.248 T:2952786128 NOTICE: Device 1 -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_deviceName : default -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_displayName : Default (bcm2835 HDMI 1 bcm2835 HDMI 1) -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_displayNameExtra: -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_channels : FL, FR, BL, BR, FC, LFE, SL, SR -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_sampleRates : 
8000,11025,16000,22050,32000,44100,48000,64000,88200,96000,176400,192000 -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_dataFormats : AE_FMT_S16NE,AE_FMT_S16LE,AE_FMT_U8 -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities -2020-12-13 14:19:19.249 T:2952786128 NOTICE: Device 2 -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_deviceName : sysdefault:CARD=b1 -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_displayName : bcm2835 HDMI 1 -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_displayNameExtra: bcm2835 HDMI 1 -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_channels : FL, FR, BL, BR, FC, LFE, SL, SR -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,32000,44100,48000,64000,88200,96000,176400,192000 -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_dataFormats : AE_FMT_S16NE,AE_FMT_S16LE,AE_FMT_U8 -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities -2020-12-13 14:19:19.249 T:2952786128 NOTICE: Device 3 -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_deviceName : sysdefault:CARD=Headphones -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_displayName : bcm2835 Headphones -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_displayNameExtra: bcm2835 Headphones -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_channels : FL, FR, BL, BR, FC, LFE, SL, SR -2020-12-13 14:19:19.249 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,32000,44100,48000,64000,88200,96000,176400,192000 -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_dataFormats : AE_FMT_S16NE,AE_FMT_S16LE,AE_FMT_U8 -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities -2020-12-13 14:19:19.250 T:2952786128 NOTICE: Enumerated PI devices: -2020-12-13 14:19:19.250 T:2952786128 NOTICE: Device 1 -2020-12-13 14:19:19.250 
T:2952786128 NOTICE: m_deviceName : HDMI -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_displayName : HDMI -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_displayNameExtra: -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_HDMI -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_channels : FL, FR -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,24000,32000,44100,48000,88200,96000,176400,192000 -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_dataFormats : AE_FMT_FLOAT,AE_FMT_S32NE,AE_FMT_S16NE,AE_FMT_S32LE,AE_FMT_S16LE,AE_FMT_FLOATP,AE_FMT_S32NEP,AE_FMT_S16NEP,AE_FMT_RAW -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_streamTypes : STREAM_TYPE_AC3,STREAM_TYPE_EAC3,STREAM_TYPE_DTSHD_CORE,STREAM_TYPE_DTS_2048,STREAM_TYPE_DTS_1024,STREAM_TYPE_DTS_512 -2020-12-13 14:19:19.250 T:2952786128 NOTICE: Device 2 -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_deviceName : Analogue -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_displayName : Analogue -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_displayNameExtra: -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_channels : FL, FR -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_sampleRates : 48000 -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_dataFormats : AE_FMT_FLOAT,AE_FMT_S32LE,AE_FMT_S16LE,AE_FMT_FLOATP,AE_FMT_S32NEP,AE_FMT_S16NEP -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities -2020-12-13 14:19:19.250 T:2952786128 NOTICE: Device 3 -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_deviceName : Both -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_displayName : HDMI and Analogue -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_displayNameExtra: -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM -2020-12-13 14:19:19.250 T:2952786128 NOTICE: m_channels : FL, FR -2020-12-13 14:19:19.251 T:2952786128 NOTICE: m_sampleRates : 48000 
-2020-12-13 14:19:19.251 T:2952786128 NOTICE: m_dataFormats : AE_FMT_FLOAT,AE_FMT_S32LE,AE_FMT_S16LE,AE_FMT_FLOATP,AE_FMT_S32NEP,AE_FMT_S16NEP -2020-12-13 14:19:19.251 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities -2020-12-13 14:19:19.524 T:2984189216 NOTICE: Raspberry PI firmware version: Jul 13 2020 13:56:29 - Copyright (c) 2012 Broadcom - version adcebbdb7b415c623931e80795ba3bae68dcc4fa (clean) (release) (start_x) -2020-12-13 14:19:19.524 T:2984189216 NOTICE: ARM mem: 704MB GPU mem: 320MB MPG2:0 WVC1:0 -2020-12-13 14:19:19.524 T:2984189216 NOTICE: cache.memorysize: 20MB libass.cache: 0MB -2020-12-13 14:19:19.524 T:2984189216 NOTICE: Config: - arm_freq=1500 - audio_pwm_mode=514 - config_hdmi_boost=5 - core_freq=500 - core_freq_min=200 - disable_commandline_tags=2 - disable_l2cache=1 - disable_overscan=1 - disable_splash=1 - display_hdmi_rotate=-1 - display_lcd_rotate=-1 - enable_gic=1 - force_eeprom_read=1 - force_pwm_open=1 - framebuffer_ignore_alpha=1 - framebuffer_swap=1 - gpu_freq=500 - gpu_freq_min=250 - init_uart_clock=0x2dc6c00 - initial_turbo=3 - lcd_framerate=60 - mask_gpu_interrupt0=1024 - mask_gpu_interrupt1=0x10000 - max_framebuffers=2 - over_voltage_avs=-20000 - pause_burst_frames=1 - program_serial_random=1 - total_mem=8192 - hdmi_force_cec_address:0=65535 - hdmi_force_cec_address:1=65535 - hdmi_ignore_cec_init:0=1 - hdmi_pixel_freq_limit:0=0x11e1a300 - hdmi_pixel_freq_limit:1=0x11e1a300 -2020-12-13 14:19:19.524 T:2984189216 NOTICE: Config: - decode_MPG2=0x00000000 - decode_WVC1=0x00000000 - device_tree=- - overlay_prefix=overlays/ - hdmi_cvt:0= - hdmi_cvt:1= - hdmi_edid_filename:0= - hdmi_edid_filename:1= - hdmi_timings:0= - hdmi_timings:1= -2020-12-13 14:19:20.344 T:2984189216 WARNING: CDRMUtils::FindPlane - could not find plane -2020-12-13 14:19:20.345 T:2984189216 WARNING: CDRMUtils::InitDrm - failed to set drm master, will try to authorize instead: Permission denied -2020-12-13 14:19:20.345 T:2984189216 NOTICE: 
CDRMUtils::InitDrm - successfully authorized drm magic -2020-12-13 14:19:22.355 T:2984189216 NOTICE: Found resolution 1920x1080 with 1920x1080 @ 60.000000 Hz -2020-12-13 14:19:22.355 T:2984189216 NOTICE: Found resolution 1920x1080 with 1920x1080 @ 59.940063 Hz -2020-12-13 14:19:22.355 T:2984189216 NOTICE: Found resolution 1920x1080 with 1920x1080i @ 60.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1920x1080 with 1920x1080i @ 59.940063 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1920x1080 with 1920x1080 @ 50.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1920x1080 with 1920x1080i @ 50.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1280x768 with 1280x768 @ 60.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1280x720 with 1280x720 @ 60.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1280x720 with 1280x720 @ 59.940063 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1280x720 with 1280x720 @ 50.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1024x768 with 1024x768 @ 75.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1024x768 with 1024x768 @ 70.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 1024x768 with 1024x768 @ 60.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 800x600 with 800x600 @ 75.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 800x600 with 800x600 @ 72.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 800x600 with 800x600 @ 60.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 800x600 with 800x600 @ 56.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 720x576 with 720x576 @ 50.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 720x576 with 720x576i @ 50.000000 Hz -2020-12-13 
14:19:22.356 T:2984189216 NOTICE: Found resolution 720x480 with 720x480 @ 59.940063 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 720x480 with 720x480 @ 60.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 720x480 with 720x480i @ 59.940063 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 720x480 with 720x480i @ 60.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 640x480 with 640x480 @ 75.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 640x480 with 640x480 @ 73.000000 Hz -2020-12-13 14:19:22.356 T:2984189216 NOTICE: Found resolution 640x480 with 640x480 @ 60.000000 Hz -2020-12-13 14:19:22.357 T:2984189216 NOTICE: Previous line repeats 1 times. -2020-12-13 14:19:22.357 T:2984189216 NOTICE: Found resolution 720x400 with 720x400 @ 70.000000 Hz -2020-12-13 14:19:22.373 T:2984189216 NOTICE: EGL_VERSION = 1.4 -2020-12-13 14:19:22.373 T:2984189216 NOTICE: EGL_VENDOR = Mesa Project -2020-12-13 14:19:22.373 T:2984189216 NOTICE: EGL_EXTENSIONS = EGL_EXT_buffer_age EGL_EXT_image_dma_buf_import EGL_EXT_image_dma_buf_import_modifiers EGL_KHR_cl_event2 EGL_KHR_config_attribs EGL_KHR_create_context EGL_KHR_create_context_no_error EGL_KHR_fence_sync EGL_KHR_get_all_proc_addresses EGL_KHR_gl_colorspace EGL_KHR_gl_renderbuffer_image EGL_KHR_gl_texture_2D_image EGL_KHR_gl_texture_3D_image EGL_KHR_gl_texture_cubemap_image EGL_KHR_image EGL_KHR_image_base EGL_KHR_image_pixmap EGL_KHR_no_config_context EGL_KHR_reusable_sync EGL_KHR_surfaceless_context EGL_EXT_pixel_format_float EGL_KHR_wait_sync EGL_MESA_configless_context EGL_MESA_drm_image EGL_MESA_image_dma_buf_export EGL_WL_bind_wayland_display -2020-12-13 14:19:22.373 T:2984189216 NOTICE: EGL_CLIENT_EXTENSIONS = EGL_EXT_device_base EGL_EXT_device_enumeration EGL_EXT_device_query EGL_EXT_platform_base EGL_KHR_client_get_all_proc_addresses EGL_EXT_client_extensions EGL_KHR_debug EGL_EXT_platform_wayland EGL_EXT_platform_x11 
EGL_MESA_platform_gbm EGL_MESA_platform_surfaceless -2020-12-13 14:19:22.377 T:2984189216 NOTICE: Checking resolution 16 -2020-12-13 14:19:22.377 T:2984189216 WARNING: CGBMUtils::DestroySurface - surface already destroyed -2020-12-13 14:19:22.422 T:2984189216 NOTICE: GL_VENDOR = Broadcom -2020-12-13 14:19:22.422 T:2984189216 NOTICE: GL_RENDERER = V3D 4.2 -2020-12-13 14:19:22.422 T:2984189216 NOTICE: GL_VERSION = OpenGL ES 3.0 Mesa 19.1.7 (git-b9d7244035) -2020-12-13 14:19:22.422 T:2984189216 NOTICE: GL_SHADING_LANGUAGE_VERSION = OpenGL ES GLSL ES 3.00 -2020-12-13 14:19:22.422 T:2984189216 NOTICE: GL_EXTENSIONS = GL_EXT_blend_minmax GL_EXT_multi_draw_arrays GL_EXT_texture_format_BGRA8888 GL_OES_compressed_ETC1_RGB8_texture GL_OES_depth24 GL_OES_element_index_uint GL_OES_fbo_render_mipmap GL_OES_mapbuffer GL_OES_rgb8_rgba8 GL_OES_standard_derivatives GL_OES_stencil8 GL_OES_texture_3D GL_OES_texture_float GL_OES_texture_half_float GL_OES_texture_half_float_linear GL_OES_texture_npot GL_OES_vertex_half_float GL_EXT_texture_sRGB_decode GL_OES_EGL_image GL_OES_depth_texture GL_OES_packed_depth_stencil GL_EXT_texture_type_2_10_10_10_REV GL_OES_get_program_binary GL_APPLE_texture_max_level GL_EXT_discard_framebuffer GL_EXT_read_format_bgra GL_EXT_frag_depth GL_NV_fbo_color_attachments GL_OES_EGL_image_external GL_OES_EGL_sync GL_OES_vertex_array_object GL_EXT_occlusion_query_boolean GL_EXT_texture_rg GL_EXT_unpack_subimage GL_NV_draw_buffers GL_NV_read_buffer GL_NV_read_depth GL_NV_read_depth_stencil GL_NV_read_stencil GL_EXT_draw_buffers GL_EXT_map_buffer_range GL_KHR_debug GL_KHR_texture_compression_astc_ldr GL_OES_depth_texture_cube_map GL_OES_required_internalformat GL_OES_surfaceless_context GL_EXT_color_buffer_float GL_EXT_sRGB_write_control GL_EXT_separate_shader_objects GL_EXT_shader_integer_mix GL_EXT_base_instance GL_EXT_compressed_ETC1_RGB8_sub_texture GL_EXT_draw_elements_base_vertex GL_EXT_texture_border_clamp GL_KHR_context_flush_control 
GL_OES_draw_elements_base_vertex GL_OES_texture_border_clamp GL_OES_texture_stencil8 GL_EXT_float_blend GL_KHR_no_error GL_KHR_texture_compression_astc_sliced_3d GL_OES_EGL_image_external_essl3 GL_MESA_shader_integer_functions GL_KHR_parallel_shader_compile GL_EXT_texture_query_lod -2020-12-13 14:19:23.375 T:2984189216 WARNING: Repository has MD5 hashes enabled - this hash function is broken and will only guard against unintentional data corruption -2020-12-13 14:19:23.375 T:2984189216 WARNING: Repository add-on repository.superrepo.org.gotham.all uses plain HTTP for add-on downloads in path http://redirect.superrepo.org/v5/addons/ - this is insecure and will make your Kodi installation vulnerable to attacks if enabled! -2020-12-13 14:19:23.583 T:2802266320 NOTICE: Running database version Addons27 -2020-12-13 14:19:23.586 T:2802266320 NOTICE: Running database version ViewModes6 -2020-12-13 14:19:23.587 T:2802266320 NOTICE: Running database version Textures13 -2020-12-13 14:19:23.594 T:2802266320 NOTICE: Running database version MyMusic72 -2020-12-13 14:19:23.604 T:2802266320 NOTICE: Running database version MyVideos116 -2020-12-13 14:19:23.607 T:2802266320 NOTICE: Running database version TV32 -2020-12-13 14:19:23.609 T:2802266320 NOTICE: Running database version Epg12 -2020-12-13 14:19:23.624 T:2984189216 NOTICE: start dvd mediatype detection -2020-12-13 14:19:23.679 T:2984189216 NOTICE: load skin from: /usr/local/share/kodi/addons/skin.estuary (version: 2.0.27) -2020-12-13 14:19:24.147 T:2802266320 WARNING: Repository has MD5 hashes enabled - this hash function is broken and will only guard against unintentional data corruption -2020-12-13 14:19:24.147 T:2802266320 WARNING: Repository add-on repository.superrepo.org.gotham.all uses plain HTTP for add-on downloads in path http://redirect.superrepo.org/v5/addons/ - this is insecure and will make your Kodi installation vulnerable to attacks if enabled! 
-2020-12-13 14:19:24.153 T:2984189216 WARNING: JSONRPC: Could not parse type "Setting.Details.SettingList" -2020-12-13 14:19:24.225 T:2984189216 NOTICE: Register - new keyboard device registered on application->keyboard: Clavier (0000:0000) -2020-12-13 14:19:24.225 T:2984189216 NOTICE: Register - new mouse device registered on application->mouse: Souris (0000:0000) -2020-12-13 14:19:24.229 T:2984189216 NOTICE: Loading player core factory settings from special://xbmc/system/playercorefactory.xml. -2020-12-13 14:19:24.230 T:2984189216 NOTICE: Loaded playercorefactory configuration -2020-12-13 14:19:24.230 T:2984189216 NOTICE: Loading player core factory settings from special://masterprofile/playercorefactory.xml. -2020-12-13 14:19:24.230 T:2984189216 NOTICE: special://masterprofile/playercorefactory.xml does not exist. Skipping. -2020-12-13 14:19:24.259 T:2984189216 NOTICE: initialize done -2020-12-13 14:19:24.259 T:2984189216 NOTICE: XBian: notifying Upstart that I'm well -2020-12-13 14:19:24.429 T:2984189216 NOTICE: Running the application... -2020-12-13 14:19:24.435 T:2984189216 NOTICE: starting zeroconf publishing -2020-12-13 14:19:24.436 T:2984189216 NOTICE: CWebServer[8080]: Started -2020-12-13 14:19:24.440 T:2655285456 NOTICE: ES: Starting UDP Event server on port 9777 -2020-12-13 14:19:24.440 T:2655285456 NOTICE: UDP: Listening on port 9777 (ipv6 : false) -2020-12-13 14:19:24.618 T:2749178064 NOTICE: Register - new cec device registered on cec->RPI: CEC Adapter (2708:1001) diff --git a/.install/.kodi/temp/kodi.old.log b/.install/.kodi/temp/kodi.old.log deleted file mode 100644 index 7a2d96470..000000000 --- a/.install/.kodi/temp/kodi.old.log +++ /dev/null @@ -1,357 +0,0 @@ -2020-12-13 13:53:53.373 T:2984099104 NOTICE: ----------------------------------------------------------------------- -2020-12-13 13:53:53.373 T:2984099104 NOTICE: Starting Kodi (18.9 (18.9.0) Git:20201027-6d9d93e-dirty). 
Platform: Linux ARM 32-bit -2020-12-13 13:53:53.374 T:2984099104 NOTICE: Using Debug Kodi x32 build -2020-12-13 13:53:53.374 T:2984099104 NOTICE: Kodi compiled 2020-11-18 by GCC 8.3.0 for Linux ARM 32-bit version 5.4.38 (328742) -2020-12-13 13:53:53.374 T:2984099104 NOTICE: Running on XBian 1.0 (knockout), kernel: Linux ARM 32-bit version 5.4.75+ -2020-12-13 13:53:53.374 T:2984099104 NOTICE: FFmpeg version/source: 4.0.4-Kodi -2020-12-13 13:53:53.374 T:2984099104 NOTICE: Host CPU: ARMv7 Processor rev 3 (v7l), 4 cores available -2020-12-13 13:53:53.374 T:2984099104 NOTICE: ARM Features: Neon enabled -2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://xbmc/ is mapped to: /usr/local/share/kodi -2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://xbmcbin/ is mapped to: /usr/local/lib/kodi -2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://xbmcbinaddons/ is mapped to: /usr/local/lib/kodi/addons -2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://masterprofile/ is mapped to: /home/xbian/.kodi/userdata -2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://envhome/ is mapped to: /home/xbian -2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://home/ is mapped to: /home/xbian/.kodi -2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://temp/ is mapped to: /home/xbian/.kodi/temp -2020-12-13 13:53:53.374 T:2984099104 NOTICE: special://logpath/ is mapped to: /home/xbian/.kodi/temp -2020-12-13 13:53:53.374 T:2984099104 NOTICE: The executable running is: /usr/local/lib/kodi/kodi-gbm -2020-12-13 13:53:53.374 T:2984099104 NOTICE: Local hostname: cuvelima -2020-12-13 13:53:53.374 T:2984099104 NOTICE: Log File is located: /home/xbian/.kodi/temp/kodi.log -2020-12-13 13:53:53.374 T:2984099104 NOTICE: ----------------------------------------------------------------------- -2020-12-13 13:53:53.375 T:2984099104 INFO: loading settings -2020-12-13 13:53:53.377 T:2984099104 NOTICE: special://profile/ is mapped to: special://masterprofile/ -2020-12-13 13:53:53.400 
T:2984099104 DEBUG: CSkinSettings: no tag found -2020-12-13 13:53:53.400 T:2984099104 NOTICE: No settings file to load (special://xbmc/system/advancedsettings.xml) -2020-12-13 13:53:53.400 T:2984099104 NOTICE: No settings file to load (special://masterprofile/advancedsettings.xml) -2020-12-13 13:53:53.400 T:2984099104 NOTICE: Default Video Player: VideoPlayer -2020-12-13 13:53:53.400 T:2984099104 NOTICE: Default Audio Player: paplayer -2020-12-13 13:53:53.400 T:2984099104 NOTICE: Disabled debug logging due to GUI setting. Level 0. -2020-12-13 13:53:53.400 T:2984099104 NOTICE: Log level changed to "LOG_LEVEL_NORMAL" -2020-12-13 13:53:53.401 T:2984099104 NOTICE: CMediaSourceSettings: loading media sources from special://masterprofile/sources.xml -2020-12-13 13:53:53.534 T:2984099104 NOTICE: PulseAudio: Server not running -2020-12-13 13:53:53.570 T:2984099104 NOTICE: Running database version Addons27 -2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.2sf v2.0.3 installed -2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.asap v2.0.2 installed -2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.dumb v2.0.2 installed -2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.fluidsynth v2.1.1 installed -2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.gme v2.0.3 installed -2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.gsf v2.0.3 installed -2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.modplug v2.0.3 installed -2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.ncsf v2.0.3 installed -2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.nosefart v2.0.2 installed -2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.openmpt v2.0.4 installed -2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.organya v1.2.1 installed -2020-12-13 13:53:53.741 T:2984099104 NOTICE: ADDON: audiodecoder.qsf v2.0.2 installed -2020-12-13 
13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.sidplay v1.2.2 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.snesapu v2.0.2 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.ssf v2.0.2 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.stsound v2.0.2 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.timidity v2.0.5 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.upse v2.0.2 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.usf v2.0.2 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.vgmstream v1.1.5 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audiodecoder.wsr v2.0.2 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audioencoder.flac v2.0.6 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audioencoder.kodi.builtin.aac v1.0.0 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audioencoder.kodi.builtin.wma v1.0.0 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audioencoder.lame v2.0.4 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audioencoder.vorbis v2.0.4 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: audioencoder.wav v2.0.3 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: game.controller.default v1.0.8 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: game.controller.snes v1.0.8 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: imagedecoder.heif v1.1.0 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: imagedecoder.mpo v1.1.2 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: imagedecoder.raw v2.1.2 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: inputstream.adaptive v2.4.6 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: inputstream.rtmp v2.0.9 installed -2020-12-13 13:53:53.742 
T:2984099104 NOTICE: ADDON: kodi.binary.global.audioengine v1.0.1 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: kodi.binary.global.filesystem v1.0.2 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: kodi.binary.global.general v1.0.3 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: kodi.binary.global.gui v5.12.0 installed -2020-12-13 13:53:53.742 T:2984099104 NOTICE: ADDON: kodi.binary.global.main v1.0.14 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.global.network v1.0.0 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.audiodecoder v2.0.0 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.audioencoder v2.0.0 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.game v1.1.0 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.imagedecoder v2.0.0 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.inputstream v2.0.8 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.peripheral v1.3.7 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.pvr v5.10.3 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.screensaver v2.0.0 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.vfs v2.0.0 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.videocodec v1.0.1 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.binary.instance.visualization v2.0.1 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: kodi.resource v1.0.0 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.album.universal v3.1.3 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.artists.universal v4.3.3 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: 
ADDON: metadata.common.allmusic.com v3.2.2 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.common.fanart.tv v3.6.3 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.common.imdb.com v3.1.6 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.common.musicbrainz.org v2.2.4 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.common.theaudiodb.com v2.0.3 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.common.themoviedb.org v3.2.12 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.local v1.0.0 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.themoviedb.org v5.2.5 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: metadata.tvshows.themoviedb.org v3.5.11 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: peripheral.joystick v1.4.9 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: peripheral.xarcade v1.1.0 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: plugin.video.arteplussept v1.0.2 installed -2020-12-13 13:53:53.743 T:2984099104 NOTICE: ADDON: plugin.video.francetv v2.0.0 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: plugin.video.vstream v0.8.3 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: plugin.xbianconfig v18.0.1 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.argustv v3.5.6 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.dvblink v4.7.3 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.dvbviewer v3.7.13 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.filmon v2.4.6 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.hdhomerun v3.5.1 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.hts v4.4.21 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.iptvarchive v3.7.2 installed -2020-12-13 13:53:53.744 T:2984099104 
NOTICE: ADDON: pvr.iptvsimple v3.9.8 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.mediaportal.tvserver v3.5.19 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.mythtv v5.10.19 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.nextpvr v3.3.21 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.njoy v3.4.3 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.octonet v0.7.1 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.pctv v2.4.7 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.stalker v3.4.10 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.teleboy v18.2.3 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.vbox v4.7.0 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.vdr.vnsi v3.6.4 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.vuplus v3.28.9 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.wmc v2.4.6 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: pvr.zattoo v18.1.21 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: repository.superrepo.org.gotham.all v0.5.206 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: repository.vstream v0.0.4 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: repository.xbmc.org v3.1.6 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: resource.images.weathericons.default v1.1.8 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: resource.language.en_gb v2.0.1 installed -2020-12-13 13:53:53.744 T:2984099104 NOTICE: ADDON: resource.language.fr_fr v9.0.24 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: resource.uisounds.kodi v1.0.0 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: screensaver.shadertoy v2.0.0 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: screensaver.xbmc.builtin.black v1.0.33 installed 
-2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: screensaver.xbmc.builtin.dim v1.0.59 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.beautifulsoup4 v4.6.2 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.certifi v2019.9.11 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.chardet v3.0.4 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.idna v2.8 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.inputstreamhelper v0.5.1 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.pil v1.1.7 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.pycryptodome v3.4.3 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.requests v2.22.0 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.simplejson v3.16.1 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.urllib3 v1.25.6 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.module.xbmcswift2 v13.0.2 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: script.service.xbian.upstart-bridge v2.0.2 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: service.xbmc.versioncheck v0.5.12 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: skin.estouchy v2.0.28 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: skin.estuary v2.0.27 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: vfs.libarchive v1.0.7 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: vfs.rar v2.3.2 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: vfs.sacd v1.0.4 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: vfs.sftp v1.0.6 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: visualization.shadertoy v1.2.4 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: 
visualization.spectrum v3.0.4 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: visualization.waveform v3.1.2 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: webinterface.default v18.x-2.4.6 installed -2020-12-13 13:53:53.745 T:2984099104 NOTICE: ADDON: xbmc.addon v18.9 installed -2020-12-13 13:53:53.746 T:2984099104 NOTICE: ADDON: xbmc.core v0.1.0 installed -2020-12-13 13:53:53.746 T:2984099104 NOTICE: ADDON: xbmc.gui v5.14.0 installed -2020-12-13 13:53:53.746 T:2984099104 NOTICE: ADDON: xbmc.json v10.3.0 installed -2020-12-13 13:53:53.746 T:2984099104 NOTICE: ADDON: xbmc.metadata v2.1.0 installed -2020-12-13 13:53:53.746 T:2984099104 NOTICE: ADDON: xbmc.python v2.26.0 installed -2020-12-13 13:53:53.746 T:2984099104 NOTICE: ADDON: xbmc.webinterface v1.0.0 installed -2020-12-13 13:53:54.073 T:2984099104 ERROR: DBus error: org.freedesktop.DBus.Error.InvalidArgs - No such property “CanSuspend” -2020-12-13 13:53:54.073 T:2984099104 ERROR: DBus error: org.freedesktop.DBus.Error.InvalidArgs - No such property “CanHibernate” -2020-12-13 13:53:54.100 T:2952786128 NOTICE: Found 2 Lists of Devices -2020-12-13 13:53:54.100 T:2952786128 NOTICE: Enumerated ALSA devices: -2020-12-13 13:53:54.100 T:2952786128 NOTICE: Device 1 -2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_deviceName : default -2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_displayName : Default (bcm2835 HDMI 1 bcm2835 HDMI 1) -2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_displayNameExtra: -2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM -2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_channels : FL, FR, BL, BR, FC, LFE, SL, SR -2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,32000,44100,48000,64000,88200,96000,176400,192000 -2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_dataFormats : AE_FMT_S16NE,AE_FMT_S16LE,AE_FMT_U8 -2020-12-13 13:53:54.100 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities -2020-12-13 
13:53:54.100 T:2952786128 NOTICE: Device 2 -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_deviceName : sysdefault:CARD=b1 -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_displayName : bcm2835 HDMI 1 -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_displayNameExtra: bcm2835 HDMI 1 -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_channels : FL, FR, BL, BR, FC, LFE, SL, SR -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,32000,44100,48000,64000,88200,96000,176400,192000 -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_dataFormats : AE_FMT_S16NE,AE_FMT_S16LE,AE_FMT_U8 -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities -2020-12-13 13:53:54.101 T:2952786128 NOTICE: Device 3 -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_deviceName : sysdefault:CARD=Headphones -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_displayName : bcm2835 Headphones -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_displayNameExtra: bcm2835 Headphones -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_channels : FL, FR, BL, BR, FC, LFE, SL, SR -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,32000,44100,48000,64000,88200,96000,176400,192000 -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_dataFormats : AE_FMT_S16NE,AE_FMT_S16LE,AE_FMT_U8 -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities -2020-12-13 13:53:54.101 T:2952786128 NOTICE: Enumerated PI devices: -2020-12-13 13:53:54.101 T:2952786128 NOTICE: Device 1 -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_deviceName : HDMI -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_displayName : HDMI -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_displayNameExtra: -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_HDMI -2020-12-13 13:53:54.101 
T:2952786128 NOTICE: m_channels : FL, FR -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_sampleRates : 8000,11025,16000,22050,24000,32000,44100,48000,88200,96000,176400,192000 -2020-12-13 13:53:54.101 T:2952786128 NOTICE: m_dataFormats : AE_FMT_FLOAT,AE_FMT_S32NE,AE_FMT_S16NE,AE_FMT_S32LE,AE_FMT_S16LE,AE_FMT_FLOATP,AE_FMT_S32NEP,AE_FMT_S16NEP,AE_FMT_RAW -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_streamTypes : STREAM_TYPE_AC3,STREAM_TYPE_EAC3,STREAM_TYPE_DTSHD_CORE,STREAM_TYPE_DTS_2048,STREAM_TYPE_DTS_1024,STREAM_TYPE_DTS_512 -2020-12-13 13:53:54.102 T:2952786128 NOTICE: Device 2 -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_deviceName : Analogue -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_displayName : Analogue -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_displayNameExtra: -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_channels : FL, FR -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_sampleRates : 48000 -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_dataFormats : AE_FMT_FLOAT,AE_FMT_S32LE,AE_FMT_S16LE,AE_FMT_FLOATP,AE_FMT_S32NEP,AE_FMT_S16NEP -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities -2020-12-13 13:53:54.102 T:2952786128 NOTICE: Device 3 -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_deviceName : Both -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_displayName : HDMI and Analogue -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_displayNameExtra: -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_deviceType : AE_DEVTYPE_PCM -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_channels : FL, FR -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_sampleRates : 48000 -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_dataFormats : AE_FMT_FLOAT,AE_FMT_S32LE,AE_FMT_S16LE,AE_FMT_FLOATP,AE_FMT_S32NEP,AE_FMT_S16NEP -2020-12-13 13:53:54.102 T:2952786128 NOTICE: m_streamTypes : No passthrough capabilities -2020-12-13 13:53:54.379 T:2984099104 NOTICE: 
Raspberry PI firmware version: Jul 13 2020 13:56:29 - Copyright (c) 2012 Broadcom - version adcebbdb7b415c623931e80795ba3bae68dcc4fa (clean) (release) (start_x) -2020-12-13 13:53:54.379 T:2984099104 NOTICE: ARM mem: 704MB GPU mem: 320MB MPG2:0 WVC1:0 -2020-12-13 13:53:54.379 T:2984099104 NOTICE: cache.memorysize: 20MB libass.cache: 0MB -2020-12-13 13:53:54.379 T:2984099104 NOTICE: Config: - arm_freq=1500 - audio_pwm_mode=514 - config_hdmi_boost=5 - core_freq=500 - core_freq_min=200 - disable_commandline_tags=2 - disable_l2cache=1 - disable_overscan=1 - disable_splash=1 - display_hdmi_rotate=-1 - display_lcd_rotate=-1 - enable_gic=1 - force_eeprom_read=1 - force_pwm_open=1 - framebuffer_ignore_alpha=1 - framebuffer_swap=1 - gpu_freq=500 - gpu_freq_min=250 - init_uart_clock=0x2dc6c00 - initial_turbo=3 - lcd_framerate=60 - mask_gpu_interrupt0=1024 - mask_gpu_interrupt1=0x10000 - max_framebuffers=2 - over_voltage_avs=-20000 - pause_burst_frames=1 - program_serial_random=1 - total_mem=8192 - hdmi_force_cec_address:0=65535 - hdmi_force_cec_address:1=65535 - hdmi_ignore_cec_init:0=1 - hdmi_pixel_freq_limit:0=0x11e1a300 - hdmi_pixel_freq_limit:1=0x11e1a300 -2020-12-13 13:53:54.380 T:2984099104 NOTICE: Config: - decode_MPG2=0x00000000 - decode_WVC1=0x00000000 - device_tree=- - overlay_prefix=overlays/ - hdmi_cvt:0= - hdmi_cvt:1= - hdmi_edid_filename:0= - hdmi_edid_filename:1= - hdmi_timings:0= - hdmi_timings:1= -2020-12-13 13:53:55.057 T:2984099104 WARNING: CDRMUtils::FindPlane - could not find plane -2020-12-13 13:53:55.058 T:2984099104 WARNING: CDRMUtils::InitDrm - failed to set drm master, will try to authorize instead: Permission denied -2020-12-13 13:53:55.058 T:2984099104 NOTICE: CDRMUtils::InitDrm - successfully authorized drm magic -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1920x1080 with 1920x1080 @ 60.000000 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1920x1080 with 1920x1080 @ 59.940063 Hz -2020-12-13 13:53:57.063 
T:2984099104 NOTICE: Found resolution 1920x1080 with 1920x1080i @ 60.000000 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1920x1080 with 1920x1080i @ 59.940063 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1920x1080 with 1920x1080 @ 50.000000 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1920x1080 with 1920x1080i @ 50.000000 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1280x768 with 1280x768 @ 60.000000 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1280x720 with 1280x720 @ 60.000000 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1280x720 with 1280x720 @ 59.940063 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1280x720 with 1280x720 @ 50.000000 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1024x768 with 1024x768 @ 75.000000 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1024x768 with 1024x768 @ 70.000000 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 1024x768 with 1024x768 @ 60.000000 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 800x600 with 800x600 @ 75.000000 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 800x600 with 800x600 @ 72.000000 Hz -2020-12-13 13:53:57.063 T:2984099104 NOTICE: Found resolution 800x600 with 800x600 @ 60.000000 Hz -2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 800x600 with 800x600 @ 56.000000 Hz -2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x576 with 720x576 @ 50.000000 Hz -2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x576 with 720x576i @ 50.000000 Hz -2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x480 with 720x480 @ 59.940063 Hz -2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x480 with 720x480 @ 60.000000 Hz -2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x480 with 720x480i @ 59.940063 Hz 
-2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x480 with 720x480i @ 60.000000 Hz -2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 640x480 with 640x480 @ 75.000000 Hz -2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 640x480 with 640x480 @ 73.000000 Hz -2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 640x480 with 640x480 @ 60.000000 Hz -2020-12-13 13:53:57.064 T:2984099104 NOTICE: Previous line repeats 1 times. -2020-12-13 13:53:57.064 T:2984099104 NOTICE: Found resolution 720x400 with 720x400 @ 70.000000 Hz -2020-12-13 13:53:57.083 T:2984099104 NOTICE: EGL_VERSION = 1.4 -2020-12-13 13:53:57.083 T:2984099104 NOTICE: EGL_VENDOR = Mesa Project -2020-12-13 13:53:57.083 T:2984099104 NOTICE: EGL_EXTENSIONS = EGL_EXT_buffer_age EGL_EXT_image_dma_buf_import EGL_EXT_image_dma_buf_import_modifiers EGL_KHR_cl_event2 EGL_KHR_config_attribs EGL_KHR_create_context EGL_KHR_create_context_no_error EGL_KHR_fence_sync EGL_KHR_get_all_proc_addresses EGL_KHR_gl_colorspace EGL_KHR_gl_renderbuffer_image EGL_KHR_gl_texture_2D_image EGL_KHR_gl_texture_3D_image EGL_KHR_gl_texture_cubemap_image EGL_KHR_image EGL_KHR_image_base EGL_KHR_image_pixmap EGL_KHR_no_config_context EGL_KHR_reusable_sync EGL_KHR_surfaceless_context EGL_EXT_pixel_format_float EGL_KHR_wait_sync EGL_MESA_configless_context EGL_MESA_drm_image EGL_MESA_image_dma_buf_export EGL_WL_bind_wayland_display -2020-12-13 13:53:57.083 T:2984099104 NOTICE: EGL_CLIENT_EXTENSIONS = EGL_EXT_device_base EGL_EXT_device_enumeration EGL_EXT_device_query EGL_EXT_platform_base EGL_KHR_client_get_all_proc_addresses EGL_EXT_client_extensions EGL_KHR_debug EGL_EXT_platform_wayland EGL_EXT_platform_x11 EGL_MESA_platform_gbm EGL_MESA_platform_surfaceless -2020-12-13 13:53:57.087 T:2984099104 NOTICE: Checking resolution 16 -2020-12-13 13:53:57.087 T:2984099104 WARNING: CGBMUtils::DestroySurface - surface already destroyed -2020-12-13 13:53:57.134 T:2984099104 NOTICE: GL_VENDOR = Broadcom 
-2020-12-13 13:53:57.134 T:2984099104 NOTICE: GL_RENDERER = V3D 4.2 -2020-12-13 13:53:57.134 T:2984099104 NOTICE: GL_VERSION = OpenGL ES 3.0 Mesa 19.1.7 (git-b9d7244035) -2020-12-13 13:53:57.134 T:2984099104 NOTICE: GL_SHADING_LANGUAGE_VERSION = OpenGL ES GLSL ES 3.00 -2020-12-13 13:53:57.135 T:2984099104 NOTICE: GL_EXTENSIONS = GL_EXT_blend_minmax GL_EXT_multi_draw_arrays GL_EXT_texture_format_BGRA8888 GL_OES_compressed_ETC1_RGB8_texture GL_OES_depth24 GL_OES_element_index_uint GL_OES_fbo_render_mipmap GL_OES_mapbuffer GL_OES_rgb8_rgba8 GL_OES_standard_derivatives GL_OES_stencil8 GL_OES_texture_3D GL_OES_texture_float GL_OES_texture_half_float GL_OES_texture_half_float_linear GL_OES_texture_npot GL_OES_vertex_half_float GL_EXT_texture_sRGB_decode GL_OES_EGL_image GL_OES_depth_texture GL_OES_packed_depth_stencil GL_EXT_texture_type_2_10_10_10_REV GL_OES_get_program_binary GL_APPLE_texture_max_level GL_EXT_discard_framebuffer GL_EXT_read_format_bgra GL_EXT_frag_depth GL_NV_fbo_color_attachments GL_OES_EGL_image_external GL_OES_EGL_sync GL_OES_vertex_array_object GL_EXT_occlusion_query_boolean GL_EXT_texture_rg GL_EXT_unpack_subimage GL_NV_draw_buffers GL_NV_read_buffer GL_NV_read_depth GL_NV_read_depth_stencil GL_NV_read_stencil GL_EXT_draw_buffers GL_EXT_map_buffer_range GL_KHR_debug GL_KHR_texture_compression_astc_ldr GL_OES_depth_texture_cube_map GL_OES_required_internalformat GL_OES_surfaceless_context GL_EXT_color_buffer_float GL_EXT_sRGB_write_control GL_EXT_separate_shader_objects GL_EXT_shader_integer_mix GL_EXT_base_instance GL_EXT_compressed_ETC1_RGB8_sub_texture GL_EXT_draw_elements_base_vertex GL_EXT_texture_border_clamp GL_KHR_context_flush_control GL_OES_draw_elements_base_vertex GL_OES_texture_border_clamp GL_OES_texture_stencil8 GL_EXT_float_blend GL_KHR_no_error GL_KHR_texture_compression_astc_sliced_3d GL_OES_EGL_image_external_essl3 GL_MESA_shader_integer_functions GL_KHR_parallel_shader_compile GL_EXT_texture_query_lod -2020-12-13 13:53:58.071 
T:2984099104 WARNING: Repository has MD5 hashes enabled - this hash function is broken and will only guard against unintentional data corruption -2020-12-13 13:53:58.071 T:2984099104 WARNING: Repository add-on repository.superrepo.org.gotham.all uses plain HTTP for add-on downloads in path http://redirect.superrepo.org/v5/addons/ - this is insecure and will make your Kodi installation vulnerable to attacks if enabled! -2020-12-13 13:53:58.277 T:2802258128 NOTICE: Running database version Addons27 -2020-12-13 13:53:58.279 T:2802258128 NOTICE: Running database version ViewModes6 -2020-12-13 13:53:58.281 T:2802258128 NOTICE: Running database version Textures13 -2020-12-13 13:53:58.287 T:2802258128 NOTICE: Running database version MyMusic72 -2020-12-13 13:53:58.297 T:2802258128 NOTICE: Running database version MyVideos116 -2020-12-13 13:53:58.301 T:2802258128 NOTICE: Running database version TV32 -2020-12-13 13:53:58.303 T:2802258128 NOTICE: Running database version Epg12 -2020-12-13 13:53:58.321 T:2984099104 NOTICE: start dvd mediatype detection -2020-12-13 13:53:58.376 T:2984099104 NOTICE: load skin from: /usr/local/share/kodi/addons/skin.estuary (version: 2.0.27) -2020-12-13 13:53:58.845 T:2802258128 WARNING: Repository has MD5 hashes enabled - this hash function is broken and will only guard against unintentional data corruption -2020-12-13 13:53:58.845 T:2802258128 WARNING: Repository add-on repository.superrepo.org.gotham.all uses plain HTTP for add-on downloads in path http://redirect.superrepo.org/v5/addons/ - this is insecure and will make your Kodi installation vulnerable to attacks if enabled! 
-2020-12-13 13:53:58.852 T:2984099104 WARNING: JSONRPC: Could not parse type "Setting.Details.SettingList" -2020-12-13 13:53:58.924 T:2984099104 NOTICE: Register - new keyboard device registered on application->keyboard: Clavier (0000:0000) -2020-12-13 13:53:58.925 T:2984099104 NOTICE: Register - new mouse device registered on application->mouse: Souris (0000:0000) -2020-12-13 13:53:58.929 T:2984099104 NOTICE: Loading player core factory settings from special://xbmc/system/playercorefactory.xml. -2020-12-13 13:53:58.929 T:2984099104 NOTICE: Loaded playercorefactory configuration -2020-12-13 13:53:58.930 T:2984099104 NOTICE: Loading player core factory settings from special://masterprofile/playercorefactory.xml. -2020-12-13 13:53:58.930 T:2984099104 NOTICE: special://masterprofile/playercorefactory.xml does not exist. Skipping. -2020-12-13 13:53:58.963 T:2984099104 NOTICE: initialize done -2020-12-13 13:53:58.963 T:2984099104 NOTICE: XBian: notifying Upstart that I'm well -2020-12-13 13:53:59.127 T:2984099104 NOTICE: Running the application... 
-2020-12-13 13:53:59.133 T:2984099104 NOTICE: starting zeroconf publishing -2020-12-13 13:53:59.135 T:2984099104 NOTICE: CWebServer[8080]: Started -2020-12-13 13:53:59.140 T:2655494352 NOTICE: ES: Starting UDP Event server on port 9777 -2020-12-13 13:53:59.141 T:2655494352 NOTICE: UDP: Listening on port 9777 (ipv6 : false) -2020-12-13 13:53:59.304 T:2749124816 NOTICE: Register - new cec device registered on cec->RPI: CEC Adapter (2708:1001) -2020-12-13 14:18:55.251 T:2984099104 NOTICE: Stopping player -2020-12-13 14:18:55.251 T:2984099104 NOTICE: Storing total System Uptime -2020-12-13 14:18:55.251 T:2984099104 NOTICE: Saving settings -2020-12-13 14:18:55.255 T:2984099104 NOTICE: Saving skin settings -2020-12-13 14:18:55.358 T:2984099104 NOTICE: Stopping all -2020-12-13 14:18:55.358 T:2984099104 NOTICE: ES: Stopping event server -2020-12-13 14:18:55.358 T:2984099104 NOTICE: stopping zeroconf publishing -2020-12-13 14:18:55.364 T:2984099104 NOTICE: CWebServer[8080]: Stopped -2020-12-13 14:18:55.758 T:2655494352 NOTICE: ES: UDP Event server stopped -2020-12-13 14:18:55.770 T:2984099104 NOTICE: stop dvd detect media -2020-12-13 14:18:56.161 T:2984099104 NOTICE: Application stopped -2020-12-13 14:18:56.380 T:2984099104 ERROR: DBus error: org.freedesktop.DBus.Error.IOError - Input/output error -2020-12-13 14:18:56.380 T:2984099104 NOTICE: XBApplicationEx: destroying... -2020-12-13 14:18:58.510 T:2984099104 NOTICE: unload skin -2020-12-13 14:18:58.552 T:2984099104 NOTICE: unload sections -2020-12-13 14:18:59.119 T:2984099104 NOTICE: XBApplicationEx: application stopped! 
diff --git a/.install/.kodi/userdata/Database/Addons27.db b/.install/.kodi/userdata/Database/Addons27.db index 76ee780cb..f186a9319 100644 Binary files a/.install/.kodi/userdata/Database/Addons27.db and b/.install/.kodi/userdata/Database/Addons27.db differ diff --git a/.install/.kodi/userdata/Database/MyVideos116.db b/.install/.kodi/userdata/Database/MyVideos116.db index 30c384eb0..5b4b0a219 100644 Binary files a/.install/.kodi/userdata/Database/MyVideos116.db and b/.install/.kodi/userdata/Database/MyVideos116.db differ diff --git a/.install/.kodi/userdata/Database/Textures13.db b/.install/.kodi/userdata/Database/Textures13.db deleted file mode 100644 index a8b9edd3e..000000000 Binary files a/.install/.kodi/userdata/Database/Textures13.db and /dev/null differ diff --git a/.install/.kodi/userdata/Database/ViewModes6.db b/.install/.kodi/userdata/Database/ViewModes6.db index b13fd464a..9fb1e6227 100644 Binary files a/.install/.kodi/userdata/Database/ViewModes6.db and b/.install/.kodi/userdata/Database/ViewModes6.db differ diff --git a/.install/.kodi/userdata/Thumbnails/0/0562359b.png b/.install/.kodi/userdata/Thumbnails/0/0562359b.png deleted file mode 100644 index 7f97c85ae..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/0/0562359b.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/0/09d2e90e.jpg b/.install/.kodi/userdata/Thumbnails/0/09d2e90e.jpg deleted file mode 100644 index 4248d032a..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/0/09d2e90e.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/0/0c782ba5.png b/.install/.kodi/userdata/Thumbnails/0/0c782ba5.png deleted file mode 100644 index fd5371d48..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/0/0c782ba5.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/0/0da69cb4.jpg b/.install/.kodi/userdata/Thumbnails/0/0da69cb4.jpg deleted file mode 100644 index 5f4cffbe9..000000000 Binary files 
a/.install/.kodi/userdata/Thumbnails/0/0da69cb4.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/1/163488dd.png b/.install/.kodi/userdata/Thumbnails/1/163488dd.png deleted file mode 100644 index 982ed25fd..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/1/163488dd.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/1/1dc90357.png b/.install/.kodi/userdata/Thumbnails/1/1dc90357.png deleted file mode 100644 index e70974b9f..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/1/1dc90357.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/1/1e8cabe0.png b/.install/.kodi/userdata/Thumbnails/1/1e8cabe0.png deleted file mode 100644 index 65e92b6df..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/1/1e8cabe0.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/1/1fe20bad.png b/.install/.kodi/userdata/Thumbnails/1/1fe20bad.png deleted file mode 100644 index 8283731f9..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/1/1fe20bad.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/2/2218c381.jpg b/.install/.kodi/userdata/Thumbnails/2/2218c381.jpg deleted file mode 100644 index 9a40d887a..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/2/2218c381.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/2/228846d1.png b/.install/.kodi/userdata/Thumbnails/2/228846d1.png deleted file mode 100644 index 80c464cd2..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/2/228846d1.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/2/230b0f94.jpg b/.install/.kodi/userdata/Thumbnails/2/230b0f94.jpg deleted file mode 100644 index d12157b60..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/2/230b0f94.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/3/33e341a9.jpg b/.install/.kodi/userdata/Thumbnails/3/33e341a9.jpg deleted file mode 100644 index 
fa465d35e..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/3/33e341a9.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/3/3894fe5d.jpg b/.install/.kodi/userdata/Thumbnails/3/3894fe5d.jpg deleted file mode 100644 index 37834ccd1..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/3/3894fe5d.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/3/3a8ed83f.jpg b/.install/.kodi/userdata/Thumbnails/3/3a8ed83f.jpg deleted file mode 100644 index fa465d35e..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/3/3a8ed83f.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/4/41d78f97.png b/.install/.kodi/userdata/Thumbnails/4/41d78f97.png deleted file mode 100644 index ddb7bc329..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/4/41d78f97.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/4/48bafe5e.jpg b/.install/.kodi/userdata/Thumbnails/4/48bafe5e.jpg deleted file mode 100644 index f79f12f74..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/4/48bafe5e.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/4/49dafe0f.jpg b/.install/.kodi/userdata/Thumbnails/4/49dafe0f.jpg deleted file mode 100644 index 88c1f88bb..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/4/49dafe0f.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/4/4ae0fd0a.jpg b/.install/.kodi/userdata/Thumbnails/4/4ae0fd0a.jpg deleted file mode 100644 index a4e7b77c8..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/4/4ae0fd0a.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/4/4f719d58.png b/.install/.kodi/userdata/Thumbnails/4/4f719d58.png deleted file mode 100644 index 35ed3c447..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/4/4f719d58.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/5/52fb809f.png b/.install/.kodi/userdata/Thumbnails/5/52fb809f.png 
deleted file mode 100644 index a2a87de57..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/5/52fb809f.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/5/557763a5.jpg b/.install/.kodi/userdata/Thumbnails/5/557763a5.jpg deleted file mode 100644 index 565725bd4..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/5/557763a5.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/5/56cde9fa.jpg b/.install/.kodi/userdata/Thumbnails/5/56cde9fa.jpg deleted file mode 100644 index a4e7b77c8..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/5/56cde9fa.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/5/57b5bfce.jpg b/.install/.kodi/userdata/Thumbnails/5/57b5bfce.jpg deleted file mode 100644 index 6941f4415..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/5/57b5bfce.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/5/5b9d7b56.png b/.install/.kodi/userdata/Thumbnails/5/5b9d7b56.png deleted file mode 100644 index dc940ff38..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/5/5b9d7b56.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/5/5ba4d498.jpg b/.install/.kodi/userdata/Thumbnails/5/5ba4d498.jpg deleted file mode 100644 index 81595536e..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/5/5ba4d498.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/5/5bb33702.png b/.install/.kodi/userdata/Thumbnails/5/5bb33702.png deleted file mode 100644 index 5e415de34..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/5/5bb33702.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/5/5bc908e2.png b/.install/.kodi/userdata/Thumbnails/5/5bc908e2.png deleted file mode 100644 index fb7fa8db7..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/5/5bc908e2.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/5/5ce7e216.png 
b/.install/.kodi/userdata/Thumbnails/5/5ce7e216.png deleted file mode 100644 index c01b316d4..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/5/5ce7e216.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/6/69ca5288.png b/.install/.kodi/userdata/Thumbnails/6/69ca5288.png deleted file mode 100644 index d03ea5d23..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/6/69ca5288.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/6/6b3c0c32.png b/.install/.kodi/userdata/Thumbnails/6/6b3c0c32.png deleted file mode 100644 index 06a9856ec..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/6/6b3c0c32.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/6/6fcacd67.jpg b/.install/.kodi/userdata/Thumbnails/6/6fcacd67.jpg deleted file mode 100644 index 5f4cffbe9..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/6/6fcacd67.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/8/808a55f7.jpg b/.install/.kodi/userdata/Thumbnails/8/808a55f7.jpg deleted file mode 100644 index 6a88ebd33..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/8/808a55f7.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/8/8977c3a3.png b/.install/.kodi/userdata/Thumbnails/8/8977c3a3.png deleted file mode 100644 index 85e2ed447..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/8/8977c3a3.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/9/94234470.jpg b/.install/.kodi/userdata/Thumbnails/9/94234470.jpg deleted file mode 100644 index a4e7b77c8..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/9/94234470.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/a/a5d64abb.png b/.install/.kodi/userdata/Thumbnails/a/a5d64abb.png deleted file mode 100644 index 0f0a1ffbc..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/a/a5d64abb.png and /dev/null differ diff --git 
a/.install/.kodi/userdata/Thumbnails/a/a5e4a176.png b/.install/.kodi/userdata/Thumbnails/a/a5e4a176.png deleted file mode 100644 index cc633d541..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/a/a5e4a176.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/a/aa93a2f6.jpg b/.install/.kodi/userdata/Thumbnails/a/aa93a2f6.jpg deleted file mode 100644 index a4e7b77c8..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/a/aa93a2f6.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/a/abb05214.jpg b/.install/.kodi/userdata/Thumbnails/a/abb05214.jpg deleted file mode 100644 index 068a773f7..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/a/abb05214.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/a/ac0efbca.png b/.install/.kodi/userdata/Thumbnails/a/ac0efbca.png deleted file mode 100644 index c43c57b4b..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/a/ac0efbca.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/a/ac0fd81a.png b/.install/.kodi/userdata/Thumbnails/a/ac0fd81a.png deleted file mode 100644 index b9437e41a..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/a/ac0fd81a.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/a/af1a3233.jpg b/.install/.kodi/userdata/Thumbnails/a/af1a3233.jpg deleted file mode 100644 index a4e7b77c8..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/a/af1a3233.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/b/b44affb0.png b/.install/.kodi/userdata/Thumbnails/b/b44affb0.png deleted file mode 100644 index 77070184b..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/b/b44affb0.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/b/b71e7b6c.png b/.install/.kodi/userdata/Thumbnails/b/b71e7b6c.png deleted file mode 100644 index c43c57b4b..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/b/b71e7b6c.png and 
/dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/c/c11b8e04.jpg b/.install/.kodi/userdata/Thumbnails/c/c11b8e04.jpg deleted file mode 100644 index 565725bd4..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/c/c11b8e04.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/c/c3592a77.png b/.install/.kodi/userdata/Thumbnails/c/c3592a77.png deleted file mode 100644 index a094d4e36..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/c/c3592a77.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/c/c7e556fc.jpg b/.install/.kodi/userdata/Thumbnails/c/c7e556fc.jpg deleted file mode 100644 index 15d2dae88..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/c/c7e556fc.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/c/cbda80a5.jpg b/.install/.kodi/userdata/Thumbnails/c/cbda80a5.jpg deleted file mode 100644 index 0fdc6eb66..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/c/cbda80a5.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/c/cfd55d9e.jpg b/.install/.kodi/userdata/Thumbnails/c/cfd55d9e.jpg deleted file mode 100644 index 37834ccd1..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/c/cfd55d9e.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/d/d12803d9.png b/.install/.kodi/userdata/Thumbnails/d/d12803d9.png deleted file mode 100644 index 49ea80c88..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/d/d12803d9.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/d/d2c983a6.jpg b/.install/.kodi/userdata/Thumbnails/d/d2c983a6.jpg deleted file mode 100644 index a4e7b77c8..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/d/d2c983a6.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/d/da7c3aaa.png b/.install/.kodi/userdata/Thumbnails/d/da7c3aaa.png deleted file mode 100644 index 438a9755c..000000000 Binary files 
a/.install/.kodi/userdata/Thumbnails/d/da7c3aaa.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/d/dc5a0d26.jpg b/.install/.kodi/userdata/Thumbnails/d/dc5a0d26.jpg deleted file mode 100644 index a4e7b77c8..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/d/dc5a0d26.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/d/dd290f35.jpg b/.install/.kodi/userdata/Thumbnails/d/dd290f35.jpg deleted file mode 100644 index a4e7b77c8..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/d/dd290f35.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/d/dea41009.jpg b/.install/.kodi/userdata/Thumbnails/d/dea41009.jpg deleted file mode 100644 index a20b1fac6..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/d/dea41009.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/e/e2a80d9a.jpg b/.install/.kodi/userdata/Thumbnails/e/e2a80d9a.jpg deleted file mode 100644 index a4e7b77c8..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/e/e2a80d9a.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/e/e47f7b27.jpg b/.install/.kodi/userdata/Thumbnails/e/e47f7b27.jpg deleted file mode 100644 index 6a88ebd33..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/e/e47f7b27.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/e/e62daf11.png b/.install/.kodi/userdata/Thumbnails/e/e62daf11.png deleted file mode 100644 index d76b635e5..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/e/e62daf11.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/e/e93d5c85.png b/.install/.kodi/userdata/Thumbnails/e/e93d5c85.png deleted file mode 100644 index 0191330e6..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/e/e93d5c85.png and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/e/ec9c7b3b.jpg b/.install/.kodi/userdata/Thumbnails/e/ec9c7b3b.jpg deleted file mode 100644 index 
a20b1fac6..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/e/ec9c7b3b.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/f/f7021412.jpg b/.install/.kodi/userdata/Thumbnails/f/f7021412.jpg deleted file mode 100644 index 496696746..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/f/f7021412.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/f/f94acfb4.jpg b/.install/.kodi/userdata/Thumbnails/f/f94acfb4.jpg deleted file mode 100644 index 439f6505e..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/f/f94acfb4.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/f/f97a02b1.jpg b/.install/.kodi/userdata/Thumbnails/f/f97a02b1.jpg deleted file mode 100644 index 752dd19b5..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/f/f97a02b1.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/Thumbnails/f/fae14ceb.jpg b/.install/.kodi/userdata/Thumbnails/f/fae14ceb.jpg deleted file mode 100644 index a20b1fac6..000000000 Binary files a/.install/.kodi/userdata/Thumbnails/f/fae14ceb.jpg and /dev/null differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/Captcha.raw b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Captcha.raw new file mode 100644 index 000000000..41d27f1c6 Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Captcha.raw differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_www_zt-za_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_www_zt-za_com.txt new file mode 100644 index 000000000..3d46abfe4 --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_www_zt-za_com.txt @@ -0,0 +1 @@ +swp_token=1605043494:0cb63ecc71f6d029418e827aed3e7c9f:c8201b8ffdb3703b1d01884bce4f7584 \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_zt-protect_com.txt 
b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_zt-protect_com.txt new file mode 100644 index 000000000..ec58db767 --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_zt-protect_com.txt @@ -0,0 +1 @@ +swp_token=1603922533:31aac141fc8c1f57a856ac4be928d2c0:067e0bd7719ed36e424bd1d672ebecdb \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_zt-protect_net.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_zt-protect_net.txt new file mode 100644 index 000000000..535c7f72f --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/Cookie_zt-protect_net.txt @@ -0,0 +1 @@ +swp_token=1603922519:96ed2062fc796339ee60eb07e53b8a30:f904797b23b7e171c91e754aaf39ae74 \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/challenge.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/challenge.png new file mode 100644 index 000000000..ebde863e7 Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/challenge.png differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_cinemegatoil_org.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_cinemegatoil_org.txt new file mode 100644 index 000000000..eefdbe3d2 --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_cinemegatoil_org.txt @@ -0,0 +1 @@ +PHPSESSID=78e4d3599bdbe46db4d8552a0eaa5882 \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_liens_free-telechargement_org.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_liens_free-telechargement_org.txt new file mode 100644 index 000000000..d772dc358 --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_liens_free-telechargement_org.txt @@ -0,0 +1 @@ +; 
__cfduid=d47ef5a3629c709aa8d6405f7b7e2de881603912323;PHPSESSID=2ckoftm7qnkt1ljgjst87tfdf5; \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_wvw_zone-annuaire_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_wvw_zone-annuaire_com.txt new file mode 100644 index 000000000..e2bfeebd6 --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_wvw_zone-annuaire_com.txt @@ -0,0 +1 @@ +PHPSESSID=78bc92d76f997ed1ec958b412cebb4f7;cf_clearance=3e9d1b257422bb4b4d348e6bcb36820a1409763c-1587067237-0-150; \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_wwv_zone-annuaire_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_wwv_zone-annuaire_com.txt new file mode 100644 index 000000000..80e2e2f10 --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_wwv_zone-annuaire_com.txt @@ -0,0 +1 @@ +cf_clearance=8b2bd89b3c1f704fd5d93990e651764031dbae6f-1586961259-0-150; \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www2_zone-warez_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www2_zone-warez_com.txt new file mode 100644 index 000000000..18a1d7732 --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www2_zone-warez_com.txt @@ -0,0 +1 @@ +cf_clearance=a4d987f691ebfc88e00e8ca2ec3a64d2e460c4d8-1587069524-0-150;PHPSESSID=f1ee2m94s8au1m6n70d1e6hn54;dle_cache=yes; \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_dl-protect1_co.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_dl-protect1_co.txt new file mode 100644 index 000000000..a815afb8f --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_dl-protect1_co.txt @@ -0,0 +1 @@ 
+cf_clearance=360d43600100367bf69621b836c7542bb1b46afe-1586420453-0-150; \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_extreme-down_ninja.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_extreme-down_ninja.txt new file mode 100644 index 000000000..4a9195773 --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_extreme-down_ninja.txt @@ -0,0 +1 @@ +cf_clearance=497f80648d29715ba0ec64db468cf5cdc6302226-1587239340-0-150;PHPSESSID=hgktt7llree1pvfvijba52f8q1;ed_last_visit=1587246540; \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_ianimes_org.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_ianimes_org.txt new file mode 100644 index 000000000..2ea90642f --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_ianimes_org.txt @@ -0,0 +1 @@ +__cf_bm=1fa31255d612253a9c312090b73adfe7e0d89ecb-1587075762-1800-Aalb3+NMEpaapK0WWv4+tlNo90yVcKCNOfR3a5mleHinzRNTFz2Z43pyLcbqUdMNjrWM4VR0dQpprObT9iUP8I8=; \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_neko-sama_fr.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_neko-sama_fr.txt new file mode 100644 index 000000000..b843a9d36 --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_neko-sama_fr.txt @@ -0,0 +1 @@ +cf_clearance=5d9a052ef4b923889d250eea819e7e8b25300f59-1587066184-0-150; \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_zt-za_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_zt-za_com.txt new file mode 100644 index 000000000..0f9ef5dae --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_zt-za_com.txt @@ -0,0 +1 @@ 
+swp_token=1607198571:9341cdb1e538727625702349a5a0ddd7:4044fb8c7c44a4890c67a9c17c77fbc4 \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_zustream_biz.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_zustream_biz.txt new file mode 100644 index 000000000..f59946310 --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_www_zustream_biz.txt @@ -0,0 +1 @@ +starstruck_0e1ee930605fd5ae5b10792311eb44d8=369447d3649ec249d188af0c95195c03;cf_clearance=fab6f52e4affbf45915501f908ce6640ba594d58-1587239427-0-150; \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_zt-protect_com.txt b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_zt-protect_com.txt new file mode 100644 index 000000000..3d682430e --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/cookie_zt-protect_com.txt @@ -0,0 +1 @@ +zt_protect_session=eyJpdiI6Im9uclVNNFRPZVdHRUUyZE1KZVVWbUE9PSIsInZhbHVlIjoic285TElyYTR2ZkI3OThZaENXbGxjTUdrNXFlZWR2TWpmOGFaNW9wTmVORGZyRGEySDlCU0VUWTdPYnFKTXRMaSIsIm1hYyI6ImJiYzkzYTkwMjY1MzMwNjEzNTgyMmUxZTk3MzUxNjQ2ZTNkZGU0MzJlNGU0OGY4OTdiNTM5MDEyZTI2OWUyODgifQ%3D%3D;XSRF-TOKEN=eyJpdiI6InZMd2xmblJLRllBUEhCcit0dnY0MHc9PSIsInZhbHVlIjoieGpOSjlmTGhKemExdGkwNENtR3UrVm5UWEhKWHBUXC9Qc01tN1ZMMTFHcEo0UjFHUGZCZ01JNEVjZDQ1Z3ozNEoiLCJtYWMiOiJjZTgyM2MxZDkxN2U4ZTNjOWU1MzAxZDczNmQ5ZjU1N2U2M2VmMGQwMTQ1MzE3ZDVmNDJiM2ZhZTE5NDViZmZhIn0%3D; \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/settings.xml b/.install/.kodi/userdata/addon_data/plugin.video.vstream/settings.xml index 362cd5b5e..b21e1f24c 100644 --- a/.install/.kodi/userdata/addon_data/plugin.video.vstream/settings.xml +++ b/.install/.kodi/userdata/addon_data/plugin.video.vstream/settings.xml @@ -1,15 +1,15 @@ 500 false - 92ab39516970ab9d86396866456ec9b6 + e275e5c42fc0f498521343e1e19a4479 w1280 false true 
lightcoral 50 - - false + /home/xbian/astroport/films/ + true false 1 true @@ -51,17 +51,21 @@ special://userdata/addon_data/plugin.video.vstream/Films special://userdata/addon_data/plugin.video.vstream/Series 0 - false + true 500 - Films - Animes - - + FASTRXBIAN + AASTRXBIAN + + films + animes + /home/xbian/ + special://userdata/addon_data/plugin.video.vstream/Enregistrement 1 2 true true + true true true true @@ -70,6 +74,7 @@ true true true + true true true true @@ -79,6 +84,7 @@ true true true + true true true true @@ -95,10 +101,13 @@ true true true + true true true true true + true + true true true true @@ -108,37 +117,55 @@ true true true + true + true true true + true + true true true + true + true true true true + true true true + true true + true true + true + true true true + true + true true true true + true true + true true true true true true + true + true + true true w342 500 - + 0.8.3 - 2020-12-12 02:55:53.502862 + 2020-12-14 22:22:54.593849 0.8.3 false - + https://www.zone-warez.com/ @@ -162,5 +189,5 @@ false 500 - + https://www.zt-za.com/ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test0.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test0.png new file mode 100644 index 000000000..7474ae799 Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test0.png differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test1.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test1.png new file mode 100644 index 000000000..0085cc3f2 Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test1.png differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test2.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test2.png new file mode 100644 index 000000000..0eae90034 Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test2.png differ diff --git 
a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test3.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test3.png new file mode 100644 index 000000000..d81c87eac Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test3.png differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test4.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test4.png new file mode 100644 index 000000000..8862ca5f1 Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test4.png differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test5.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test5.png new file mode 100644 index 000000000..c558fad99 Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test5.png differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test6.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test6.png new file mode 100644 index 000000000..7670bab38 Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test6.png differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test7.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test7.png new file mode 100644 index 000000000..9bd52555b Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test7.png differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/test8.png b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test8.png new file mode 100644 index 000000000..0b3a9195c Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/test8.png differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/video_cache.db b/.install/.kodi/userdata/addon_data/plugin.video.vstream/video_cache.db new file mode 100644 index 000000000..dc9b08c83 Binary files /dev/null 
and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/video_cache.db differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.vstream/vstream.db b/.install/.kodi/userdata/addon_data/plugin.video.vstream/vstream.db index 4caa54a00..21bcd7036 100644 Binary files a/.install/.kodi/userdata/addon_data/plugin.video.vstream/vstream.db and b/.install/.kodi/userdata/addon_data/plugin.video.vstream/vstream.db differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.youtube/access_manager.json b/.install/.kodi/userdata/addon_data/plugin.video.youtube/access_manager.json new file mode 100644 index 000000000..704a7caab --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.youtube/access_manager.json @@ -0,0 +1,19 @@ +{ + "access_manager": { + "current_user": "0", + "developers": {}, + "last_origin": "plugin.video.youtube", + "users": { + "0": { + "access_token": "", + "id": "af085b4b77e247e5b4ba97d958567d9f", + "last_key_hash": "94aa8ff1499c32985b71113ea0b99b60", + "name": "Default", + "refresh_token": "", + "token_expires": -1, + "watch_history": "HL", + "watch_later": " WL" + } + } + } +} \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.youtube/api_keys.json b/.install/.kodi/userdata/addon_data/plugin.video.youtube/api_keys.json new file mode 100644 index 000000000..a242a0393 --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.youtube/api_keys.json @@ -0,0 +1,10 @@ +{ + "keys": { + "developer": {}, + "personal": { + "api_key": "", + "client_id": "", + "client_secret": "" + } + } +} \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/cache.sqlite b/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/cache.sqlite new file mode 100644 index 000000000..1bb496b63 Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/cache.sqlite differ diff --git 
a/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/data_cache.sqlite b/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/data_cache.sqlite new file mode 100644 index 000000000..06a99d3c0 Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/data_cache.sqlite differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/search.sqlite b/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/search.sqlite new file mode 100644 index 000000000..cb89c44b5 Binary files /dev/null and b/.install/.kodi/userdata/addon_data/plugin.video.youtube/kodion/search.sqlite differ diff --git a/.install/.kodi/userdata/addon_data/plugin.video.youtube/settings.xml b/.install/.kodi/userdata/addon_data/plugin.video.youtube/settings.xml new file mode 100644 index 000000000..6d85e087d --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.video.youtube/settings.xml @@ -0,0 +1,102 @@ + + + 0 + true + false + false + 10 + 9 + true + + + 0.0.0.0 + + + + + + + + + + + + + + + + false + false + true + 50152 + 8 + true + 85 + false + + 0 + 10 + false + + false + 0 + false + 1 + + + + + 3 + false + false + + true + true + false + + + + + true + false + + true + true + true + + true + true + true + true + true + true + false + true + true + true + false + false + true + true + true + true + true + true + true + true + true + + true + + fr + 43.6046,1.4451 + 500 + true + false + false + false + FR + false + false + true + diff --git a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/backuphome b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/backuphome index 97afcb005..e69de29bb 100644 --- a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/backuphome +++ b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/backuphome @@ -1 +0,0 @@ -2020-12-13-1428 diff --git a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/cache.db 
b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/cache.db index 65bd225c3..185a5c628 100644 Binary files a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/cache.db and b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/cache.db differ diff --git a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/hide.backuphome b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/hide.backuphome new file mode 100644 index 000000000..c3f29566b --- /dev/null +++ b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/hide.backuphome @@ -0,0 +1,2 @@ +I01 +. \ No newline at end of file diff --git a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/xbiancopy b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/xbiancopy index e69de29bb..1b8447b4a 100644 --- a/.install/.kodi/userdata/addon_data/plugin.xbianconfig/xbiancopy +++ b/.install/.kodi/userdata/addon_data/plugin.xbianconfig/xbiancopy @@ -0,0 +1 @@ +2020-12-16-1552 diff --git a/.install/.kodi/userdata/addon_data/service.xbmc.versioncheck/settings.xml b/.install/.kodi/userdata/addon_data/service.xbmc.versioncheck/settings.xml new file mode 100644 index 000000000..cccd6e845 --- /dev/null +++ b/.install/.kodi/userdata/addon_data/service.xbmc.versioncheck/settings.xml @@ -0,0 +1,6 @@ + + 18.9 stable + false + false + true + diff --git a/.install/.kodi/userdata/addon_data/skin.estuary/settings.xml b/.install/.kodi/userdata/addon_data/skin.estuary/settings.xml index cb2abfea8..2e550c7e8 100644 --- a/.install/.kodi/userdata/addon_data/skin.estuary/settings.xml +++ b/.install/.kodi/userdata/addon_data/skin.estuary/settings.xml @@ -1,60 +1,68 @@ - false - false - false - false - false - false - false - false - false - true - false - false - false - true false - true - false - false - true - false - false false + true + true + false + false + false + true + false + false + false + false + true + false + false + true + true false - Refresh + true + false + false + true + false + false + false + Scan + XBian 
System Backup + inetd + File - Reloading values for wlan0 - - - - DHCP - - Click to load... - 5.4.75 - Click to load... - Unknown - Cliquer pour charger - requis - File - Cliquer pour charger - Click to load... - Cliquer pour charger - - DHCP - Cliquer pour charger - - eth0 - Cliquer pour charger + wlan2 + Click to load... + + 1 + + Cliquer pour charger Cliquer pour charger - - daily - daily - daily - Device - daily - inetd - XBian System Backup + Cliquer pour charger + File + Cliquer pour charger + + Cliquer pour charger + + Cliquer pour charger + + requis + inetd + DHCP + DHCP + + 5.4.75 + + + daily + requis + daily + + Unknown + daily + Recherche des réseaux disponibles + daily + + + diff --git a/.install/.kodi/userdata/favourites.xml b/.install/.kodi/userdata/favourites.xml index 2be744fbb..c32380fde 100644 --- a/.install/.kodi/userdata/favourites.xml +++ b/.install/.kodi/userdata/favourites.xml @@ -1 +1,3 @@ - + + ActivateWindow(10025,"plugin://plugin.video.vstream/?function=load&sFav=load&site=astroport&siteUrl=http%3a%2f%2fvenom&title=ASTROPORT%20_PROFIL_%20(_LOGIN_)%20(_MDP_)",return) + diff --git a/.install/.kodi/userdata/guisettings.xml b/.install/.kodi/userdata/guisettings.xml index cd12437af..3890d1553 100644 --- a/.install/.kodi/userdata/guisettings.xml +++ b/.install/.kodi/userdata/guisettings.xml @@ -12,7 +12,7 @@ false false 2 - ALSA:default + PI:Both 2 2 false @@ -77,7 +77,7 @@ true fr false - English QWERTY + French AZERTY mediadefault DEFAULT France @@ -89,8 +89,8 @@ original regional regional - Etc/UTC - + Europe/Paris + France regional false Default @@ -156,7 +156,7 @@ 0 false - 14 + 17 true true true @@ -209,7 +209,7 @@ false true XBian - f884d8e4-f4f7-43da-9dc0-451d91b436d6 + ca06be1c-dd53-4909-aef9-1cae7fdcf17a false 25 true @@ -226,7 +226,7 @@ false true - 8080 + 8181 false xbmc webinterface.default @@ -249,7 +249,7 @@ false arial.ttf 28 - English + English,French false false @@ -442,12 +442,12 @@ - 3 + 2 0 true - 1591 + 53439 @@ -490,13 
+490,13 @@ 0 0 - false + true false 0 diff --git a/.install/.kodi/userdata/sources.xml b/.install/.kodi/userdata/sources.xml index 686f393e1..40012dcb4 100644 --- a/.install/.kodi/userdata/sources.xml +++ b/.install/.kodi/userdata/sources.xml @@ -20,7 +20,7 @@ SuperRepo.org Virtual Disk - http://srp.nu/jarvis/ + http://srp.nu/leia/ true